/* Extracted from gcc/gimplify.c (official-gcc.git),
   blob 47c4d253e41ddb49d21d7c19c7f2b9201866bf9e; cf. PR middle-end/66867.  */
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2016 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "gimple-predict.h"
32 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "tree-pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "alias.h"
38 #include "fold-const.h"
39 #include "calls.h"
40 #include "varasm.h"
41 #include "stmt.h"
42 #include "expr.h"
43 #include "gimple-fold.h"
44 #include "tree-eh.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "stor-layout.h"
48 #include "print-tree.h"
49 #include "tree-iterator.h"
50 #include "tree-inline.h"
51 #include "langhooks.h"
52 #include "tree-cfg.h"
53 #include "tree-ssa.h"
54 #include "omp-low.h"
55 #include "gimple-low.h"
56 #include "cilk.h"
57 #include "gomp-constants.h"
58 #include "tree-dump.h"
59 #include "gimple-walk.h"
60 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
61 #include "builtins.h"
/* Per-variable data-sharing flags recorded while gimplifying an
   OpenMP/OpenACC region.  These are OR-able bit flags.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  /* Mask selecting the mutually-exclusive data-sharing class bits.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
/* Kind of OMP/ACC region currently being gimplified.  The low bits
   distinguish combined constructs (e.g. ORT_COMBINED_PARALLEL is
   ORT_PARALLEL | 1).  */

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};
135 /* Gimplify hashtable helper. */
137 struct gimplify_hasher : free_ptr_hash <elt_t>
139 static inline hashval_t hash (const elt_t *);
140 static inline bool equal (const elt_t *, const elt_t *);
143 struct gimplify_ctx
145 struct gimplify_ctx *prev_context;
147 vec<gbind *> bind_expr_stack;
148 tree temps;
149 gimple_seq conditional_cleanups;
150 tree exit_label;
151 tree return_temp;
153 vec<tree> case_labels;
154 /* The formal temporary table. Should this be persistent? */
155 hash_table<gimplify_hasher> *temp_htab;
157 int conditions;
158 unsigned into_ssa : 1;
159 unsigned allow_rhs_cond_expr : 1;
160 unsigned in_cleanup_point_expr : 1;
161 unsigned keep_stack : 1;
162 unsigned save_stack : 1;
165 struct gimplify_omp_ctx
167 struct gimplify_omp_ctx *outer_context;
168 splay_tree variables;
169 hash_set<tree> *privatized_types;
170 /* Iteration variables in an OMP_FOR. */
171 vec<tree> loop_iter_var;
172 location_t location;
173 enum omp_clause_default_kind default_kind;
174 enum omp_region_type region_type;
175 bool combined_loop;
176 bool distribute;
177 bool target_map_scalars_firstprivate;
178 bool target_map_pointers_as_0len_arrays;
179 bool target_firstprivatize_array_bases;
182 static struct gimplify_ctx *gimplify_ctxp;
183 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
185 /* Forward declaration. */
186 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
187 static hash_map<tree, tree> *oacc_declare_returns;
188 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
189 bool (*) (tree), fallback_t, bool);
191 /* Shorter alias name for the above function for use in gimplify.c
192 only. */
194 static inline void
195 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
197 gimple_seq_add_stmt_without_update (seq_p, gs);
200 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
201 NULL, a new sequence is allocated. This function is
202 similar to gimple_seq_add_seq, but does not scan the operands.
203 During gimplification, we need to manipulate statement sequences
204 before the def/use vectors have been constructed. */
206 static void
207 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
209 gimple_stmt_iterator si;
211 if (src == NULL)
212 return;
214 si = gsi_last (*dst_p);
215 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
219 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
220 and popping gimplify contexts. */
222 static struct gimplify_ctx *ctx_pool = NULL;
224 /* Return a gimplify context struct from the pool. */
226 static inline struct gimplify_ctx *
227 ctx_alloc (void)
229 struct gimplify_ctx * c = ctx_pool;
231 if (c)
232 ctx_pool = c->prev_context;
233 else
234 c = XNEW (struct gimplify_ctx);
236 memset (c, '\0', sizeof (*c));
237 return c;
240 /* Put gimplify context C back into the pool. */
242 static inline void
243 ctx_free (struct gimplify_ctx *c)
245 c->prev_context = ctx_pool;
246 ctx_pool = c;
249 /* Free allocated ctx stack memory. */
251 void
252 free_gimplify_stack (void)
254 struct gimplify_ctx *c;
256 while ((c = ctx_pool))
258 ctx_pool = c->prev_context;
259 free (c);
264 /* Set up a context for the gimplifier. */
266 void
267 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
269 struct gimplify_ctx *c = ctx_alloc ();
271 c->prev_context = gimplify_ctxp;
272 gimplify_ctxp = c;
273 gimplify_ctxp->into_ssa = in_ssa;
274 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
277 /* Tear down a context for the gimplifier. If BODY is non-null, then
278 put the temporaries into the outer BIND_EXPR. Otherwise, put them
279 in the local_decls.
281 BODY is not a sequence, but the first tuple in a sequence. */
283 void
284 pop_gimplify_context (gimple *body)
286 struct gimplify_ctx *c = gimplify_ctxp;
288 gcc_assert (c
289 && (!c->bind_expr_stack.exists ()
290 || c->bind_expr_stack.is_empty ()));
291 c->bind_expr_stack.release ();
292 gimplify_ctxp = c->prev_context;
294 if (body)
295 declare_vars (c->temps, body, false);
296 else
297 record_vars (c->temps);
299 delete c->temp_htab;
300 c->temp_htab = NULL;
301 ctx_free (c);
304 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
306 static void
307 gimple_push_bind_expr (gbind *bind_stmt)
309 gimplify_ctxp->bind_expr_stack.reserve (8);
310 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
313 /* Pop the first element off the stack of bindings. */
315 static void
316 gimple_pop_bind_expr (void)
318 gimplify_ctxp->bind_expr_stack.pop ();
321 /* Return the first element of the stack of bindings. */
323 gbind *
324 gimple_current_bind_expr (void)
326 return gimplify_ctxp->bind_expr_stack.last ();
329 /* Return the stack of bindings created during gimplification. */
331 vec<gbind *>
332 gimple_bind_expr_stack (void)
334 return gimplify_ctxp->bind_expr_stack;
337 /* Return true iff there is a COND_EXPR between us and the innermost
338 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
340 static bool
341 gimple_conditional_context (void)
343 return gimplify_ctxp->conditions > 0;
346 /* Note that we've entered a COND_EXPR. */
348 static void
349 gimple_push_condition (void)
351 #ifdef ENABLE_GIMPLE_CHECKING
352 if (gimplify_ctxp->conditions == 0)
353 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
354 #endif
355 ++(gimplify_ctxp->conditions);
358 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
359 now, add any conditional cleanups we've seen to the prequeue. */
361 static void
362 gimple_pop_condition (gimple_seq *pre_p)
364 int conds = --(gimplify_ctxp->conditions);
366 gcc_assert (conds >= 0);
367 if (conds == 0)
369 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
370 gimplify_ctxp->conditional_cleanups = NULL;
374 /* A stable comparison routine for use with splay trees and DECLs. */
376 static int
377 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
379 tree a = (tree) xa;
380 tree b = (tree) xb;
382 return DECL_UID (a) - DECL_UID (b);
385 /* Create a new omp construct that deals with variable remapping. */
387 static struct gimplify_omp_ctx *
388 new_omp_context (enum omp_region_type region_type)
390 struct gimplify_omp_ctx *c;
392 c = XCNEW (struct gimplify_omp_ctx);
393 c->outer_context = gimplify_omp_ctxp;
394 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
395 c->privatized_types = new hash_set<tree>;
396 c->location = input_location;
397 c->region_type = region_type;
398 if ((region_type & ORT_TASK) == 0)
399 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
400 else
401 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
403 return c;
406 /* Destroy an omp construct that deals with variable remapping. */
408 static void
409 delete_omp_context (struct gimplify_omp_ctx *c)
411 splay_tree_delete (c->variables);
412 delete c->privatized_types;
413 c->loop_iter_var.release ();
414 XDELETE (c);
417 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
418 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
420 /* Both gimplify the statement T and append it to *SEQ_P. This function
421 behaves exactly as gimplify_stmt, but you don't have to pass T as a
422 reference. */
424 void
425 gimplify_and_add (tree t, gimple_seq *seq_p)
427 gimplify_stmt (&t, seq_p);
430 /* Gimplify statement T into sequence *SEQ_P, and return the first
431 tuple in the sequence of generated tuples for this statement.
432 Return NULL if gimplifying T produced no tuples. */
434 static gimple *
435 gimplify_and_return_first (tree t, gimple_seq *seq_p)
437 gimple_stmt_iterator last = gsi_last (*seq_p);
439 gimplify_and_add (t, seq_p);
441 if (!gsi_end_p (last))
443 gsi_next (&last);
444 return gsi_stmt (last);
446 else
447 return gimple_seq_first_stmt (*seq_p);
450 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
451 LHS, or for a call argument. */
453 static bool
454 is_gimple_mem_rhs (tree t)
456 /* If we're dealing with a renamable type, either source or dest must be
457 a renamed variable. */
458 if (is_gimple_reg_type (TREE_TYPE (t)))
459 return is_gimple_val (t);
460 else
461 return is_gimple_val (t) || is_gimple_lvalue (t);
464 /* Return true if T is a CALL_EXPR or an expression that can be
465 assigned to a temporary. Note that this predicate should only be
466 used during gimplification. See the rationale for this in
467 gimplify_modify_expr. */
469 static bool
470 is_gimple_reg_rhs_or_call (tree t)
472 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
473 || TREE_CODE (t) == CALL_EXPR);
476 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
477 this predicate should only be used during gimplification. See the
478 rationale for this in gimplify_modify_expr. */
480 static bool
481 is_gimple_mem_rhs_or_call (tree t)
483 /* If we're dealing with a renamable type, either source or dest must be
484 a renamed variable. */
485 if (is_gimple_reg_type (TREE_TYPE (t)))
486 return is_gimple_val (t);
487 else
488 return (is_gimple_val (t) || is_gimple_lvalue (t)
489 || TREE_CODE (t) == CALL_EXPR);
492 /* Create a temporary with a name derived from VAL. Subroutine of
493 lookup_tmp_var; nobody else should call this function. */
495 static inline tree
496 create_tmp_from_val (tree val)
498 /* Drop all qualifiers and address-space information from the value type. */
499 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
500 tree var = create_tmp_var (type, get_name (val));
501 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
502 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
503 DECL_GIMPLE_REG_P (var) = 1;
504 return var;
507 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
508 an existing expression temporary. */
510 static tree
511 lookup_tmp_var (tree val, bool is_formal)
513 tree ret;
515 /* If not optimizing, never really reuse a temporary. local-alloc
516 won't allocate any variable that is used in more than one basic
517 block, which means it will go into memory, causing much extra
518 work in reload and final and poorer code generation, outweighing
519 the extra memory allocation here. */
520 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
521 ret = create_tmp_from_val (val);
522 else
524 elt_t elt, *elt_p;
525 elt_t **slot;
527 elt.val = val;
528 if (!gimplify_ctxp->temp_htab)
529 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
530 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
531 if (*slot == NULL)
533 elt_p = XNEW (elt_t);
534 elt_p->val = val;
535 elt_p->temp = ret = create_tmp_from_val (val);
536 *slot = elt_p;
538 else
540 elt_p = *slot;
541 ret = elt_p->temp;
545 return ret;
548 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
550 static tree
551 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
552 bool is_formal, bool allow_ssa)
554 tree t, mod;
556 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
557 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
558 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
559 fb_rvalue);
561 if (allow_ssa
562 && gimplify_ctxp->into_ssa
563 && is_gimple_reg_type (TREE_TYPE (val)))
565 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
566 if (! gimple_in_ssa_p (cfun))
568 const char *name = get_name (val);
569 if (name)
570 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
573 else
574 t = lookup_tmp_var (val, is_formal);
576 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
578 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
580 /* gimplify_modify_expr might want to reduce this further. */
581 gimplify_and_add (mod, pre_p);
582 ggc_free (mod);
584 return t;
587 /* Return a formal temporary variable initialized with VAL. PRE_P is as
588 in gimplify_expr. Only use this function if:
590 1) The value of the unfactored expression represented by VAL will not
591 change between the initialization and use of the temporary, and
592 2) The temporary will not be otherwise modified.
594 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
595 and #2 means it is inappropriate for && temps.
597 For other cases, use get_initialized_tmp_var instead. */
599 tree
600 get_formal_tmp_var (tree val, gimple_seq *pre_p)
602 return internal_get_tmp_var (val, pre_p, NULL, true, true);
605 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
606 are as in gimplify_expr. */
608 tree
609 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
610 bool allow_ssa)
612 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
615 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
616 generate debug info for them; otherwise don't. */
618 void
619 declare_vars (tree vars, gimple *gs, bool debug_info)
621 tree last = vars;
622 if (last)
624 tree temps, block;
626 gbind *scope = as_a <gbind *> (gs);
628 temps = nreverse (last);
630 block = gimple_bind_block (scope);
631 gcc_assert (!block || TREE_CODE (block) == BLOCK);
632 if (!block || !debug_info)
634 DECL_CHAIN (last) = gimple_bind_vars (scope);
635 gimple_bind_set_vars (scope, temps);
637 else
639 /* We need to attach the nodes both to the BIND_EXPR and to its
640 associated BLOCK for debugging purposes. The key point here
641 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
642 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
643 if (BLOCK_VARS (block))
644 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
645 else
647 gimple_bind_set_vars (scope,
648 chainon (gimple_bind_vars (scope), temps));
649 BLOCK_VARS (block) = temps;
655 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
656 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
657 no such upper bound can be obtained. */
659 static void
660 force_constant_size (tree var)
662 /* The only attempt we make is by querying the maximum size of objects
663 of the variable's type. */
665 HOST_WIDE_INT max_size;
667 gcc_assert (TREE_CODE (var) == VAR_DECL);
669 max_size = max_int_size_in_bytes (TREE_TYPE (var));
671 gcc_assert (max_size >= 0);
673 DECL_SIZE_UNIT (var)
674 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
675 DECL_SIZE (var)
676 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
679 /* Push the temporary variable TMP into the current binding. */
681 void
682 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
684 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
686 /* Later processing assumes that the object size is constant, which might
687 not be true at this point. Force the use of a constant upper bound in
688 this case. */
689 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
690 force_constant_size (tmp);
692 DECL_CONTEXT (tmp) = fn->decl;
693 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
695 record_vars_into (tmp, fn->decl);
698 /* Push the temporary variable TMP into the current binding. */
700 void
701 gimple_add_tmp_var (tree tmp)
703 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
705 /* Later processing assumes that the object size is constant, which might
706 not be true at this point. Force the use of a constant upper bound in
707 this case. */
708 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
709 force_constant_size (tmp);
711 DECL_CONTEXT (tmp) = current_function_decl;
712 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
714 if (gimplify_ctxp)
716 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
717 gimplify_ctxp->temps = tmp;
719 /* Mark temporaries local within the nearest enclosing parallel. */
720 if (gimplify_omp_ctxp)
722 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
723 while (ctx
724 && (ctx->region_type == ORT_WORKSHARE
725 || ctx->region_type == ORT_SIMD
726 || ctx->region_type == ORT_ACC))
727 ctx = ctx->outer_context;
728 if (ctx)
729 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
732 else if (cfun)
733 record_vars (tmp);
734 else
736 gimple_seq body_seq;
738 /* This case is for nested functions. We need to expose the locals
739 they create. */
740 body_seq = gimple_body (current_function_decl);
741 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
747 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
748 nodes that are referenced more than once in GENERIC functions. This is
749 necessary because gimplification (translation into GIMPLE) is performed
750 by modifying tree nodes in-place, so gimplication of a shared node in a
751 first context could generate an invalid GIMPLE form in a second context.
753 This is achieved with a simple mark/copy/unmark algorithm that walks the
754 GENERIC representation top-down, marks nodes with TREE_VISITED the first
755 time it encounters them, duplicates them if they already have TREE_VISITED
756 set, and finally removes the TREE_VISITED marks it has set.
758 The algorithm works only at the function level, i.e. it generates a GENERIC
759 representation of a function with no nodes shared within the function when
760 passed a GENERIC function (except for nodes that are allowed to be shared).
762 At the global level, it is also necessary to unshare tree nodes that are
763 referenced in more than one function, for the same aforementioned reason.
764 This requires some cooperation from the front-end. There are 2 strategies:
766 1. Manual unsharing. The front-end needs to call unshare_expr on every
767 expression that might end up being shared across functions.
769 2. Deep unsharing. This is an extension of regular unsharing. Instead
770 of calling unshare_expr on expressions that might be shared across
771 functions, the front-end pre-marks them with TREE_VISITED. This will
772 ensure that they are unshared on the first reference within functions
773 when the regular unsharing algorithm runs. The counterpart is that
774 this algorithm must look deeper than for manual unsharing, which is
775 specified by LANG_HOOKS_DEEP_UNSHARING.
777 If there are only few specific cases of node sharing across functions, it is
778 probably easier for a front-end to unshare the expressions manually. On the
779 contrary, if the expressions generated at the global level are as widespread
780 as expressions generated within functions, deep unsharing is very likely the
781 way to go. */
783 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
784 These nodes model computations that must be done once. If we were to
785 unshare something like SAVE_EXPR(i++), the gimplification process would
786 create wrong code. However, if DATA is non-null, it must hold a pointer
787 set that is used to unshare the subtrees of these nodes. */
789 static tree
790 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
792 tree t = *tp;
793 enum tree_code code = TREE_CODE (t);
795 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
796 copy their subtrees if we can make sure to do it only once. */
797 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
799 if (data && !((hash_set<tree> *)data)->add (t))
801 else
802 *walk_subtrees = 0;
805 /* Stop at types, decls, constants like copy_tree_r. */
806 else if (TREE_CODE_CLASS (code) == tcc_type
807 || TREE_CODE_CLASS (code) == tcc_declaration
808 || TREE_CODE_CLASS (code) == tcc_constant
809 /* We can't do anything sensible with a BLOCK used as an
810 expression, but we also can't just die when we see it
811 because of non-expression uses. So we avert our eyes
812 and cross our fingers. Silly Java. */
813 || code == BLOCK)
814 *walk_subtrees = 0;
816 /* Cope with the statement expression extension. */
817 else if (code == STATEMENT_LIST)
820 /* Leave the bulk of the work to copy_tree_r itself. */
821 else
822 copy_tree_r (tp, walk_subtrees, NULL);
824 return NULL_TREE;
827 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
828 If *TP has been visited already, then *TP is deeply copied by calling
829 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
831 static tree
832 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
834 tree t = *tp;
835 enum tree_code code = TREE_CODE (t);
837 /* Skip types, decls, and constants. But we do want to look at their
838 types and the bounds of types. Mark them as visited so we properly
839 unmark their subtrees on the unmark pass. If we've already seen them,
840 don't look down further. */
841 if (TREE_CODE_CLASS (code) == tcc_type
842 || TREE_CODE_CLASS (code) == tcc_declaration
843 || TREE_CODE_CLASS (code) == tcc_constant)
845 if (TREE_VISITED (t))
846 *walk_subtrees = 0;
847 else
848 TREE_VISITED (t) = 1;
851 /* If this node has been visited already, unshare it and don't look
852 any deeper. */
853 else if (TREE_VISITED (t))
855 walk_tree (tp, mostly_copy_tree_r, data, NULL);
856 *walk_subtrees = 0;
859 /* Otherwise, mark the node as visited and keep looking. */
860 else
861 TREE_VISITED (t) = 1;
863 return NULL_TREE;
866 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
867 copy_if_shared_r callback unmodified. */
869 static inline void
870 copy_if_shared (tree *tp, void *data)
872 walk_tree (tp, copy_if_shared_r, data, NULL);
875 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
876 any nested functions. */
878 static void
879 unshare_body (tree fndecl)
881 struct cgraph_node *cgn = cgraph_node::get (fndecl);
882 /* If the language requires deep unsharing, we need a pointer set to make
883 sure we don't repeatedly unshare subtrees of unshareable nodes. */
884 hash_set<tree> *visited
885 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
887 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
888 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
889 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
891 delete visited;
893 if (cgn)
894 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
895 unshare_body (cgn->decl);
898 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
899 Subtrees are walked until the first unvisited node is encountered. */
901 static tree
902 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
904 tree t = *tp;
906 /* If this node has been visited, unmark it and keep looking. */
907 if (TREE_VISITED (t))
908 TREE_VISITED (t) = 0;
910 /* Otherwise, don't look any deeper. */
911 else
912 *walk_subtrees = 0;
914 return NULL_TREE;
917 /* Unmark the visited trees rooted at *TP. */
919 static inline void
920 unmark_visited (tree *tp)
922 walk_tree (tp, unmark_visited_r, NULL, NULL);
925 /* Likewise, but mark all trees as not visited. */
927 static void
928 unvisit_body (tree fndecl)
930 struct cgraph_node *cgn = cgraph_node::get (fndecl);
932 unmark_visited (&DECL_SAVED_TREE (fndecl));
933 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
934 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
936 if (cgn)
937 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
938 unvisit_body (cgn->decl);
941 /* Unconditionally make an unshared copy of EXPR. This is used when using
942 stored expressions which span multiple functions, such as BINFO_VTABLE,
943 as the normal unsharing process can't tell that they're shared. */
945 tree
946 unshare_expr (tree expr)
948 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
949 return expr;
952 /* Worker for unshare_expr_without_location. */
954 static tree
955 prune_expr_location (tree *tp, int *walk_subtrees, void *)
957 if (EXPR_P (*tp))
958 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
959 else
960 *walk_subtrees = 0;
961 return NULL_TREE;
964 /* Similar to unshare_expr but also prune all expression locations
965 from EXPR. */
967 tree
968 unshare_expr_without_location (tree expr)
970 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
971 if (EXPR_P (expr))
972 walk_tree (&expr, prune_expr_location, NULL, NULL);
973 return expr;
976 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
977 contain statements and have a value. Assign its value to a temporary
978 and give it void_type_node. Return the temporary, or NULL_TREE if
979 WRAPPER was already void. */
981 tree
982 voidify_wrapper_expr (tree wrapper, tree temp)
984 tree type = TREE_TYPE (wrapper);
985 if (type && !VOID_TYPE_P (type))
987 tree *p;
989 /* Set p to point to the body of the wrapper. Loop until we find
990 something that isn't a wrapper. */
991 for (p = &wrapper; p && *p; )
993 switch (TREE_CODE (*p))
995 case BIND_EXPR:
996 TREE_SIDE_EFFECTS (*p) = 1;
997 TREE_TYPE (*p) = void_type_node;
998 /* For a BIND_EXPR, the body is operand 1. */
999 p = &BIND_EXPR_BODY (*p);
1000 break;
1002 case CLEANUP_POINT_EXPR:
1003 case TRY_FINALLY_EXPR:
1004 case TRY_CATCH_EXPR:
1005 TREE_SIDE_EFFECTS (*p) = 1;
1006 TREE_TYPE (*p) = void_type_node;
1007 p = &TREE_OPERAND (*p, 0);
1008 break;
1010 case STATEMENT_LIST:
1012 tree_stmt_iterator i = tsi_last (*p);
1013 TREE_SIDE_EFFECTS (*p) = 1;
1014 TREE_TYPE (*p) = void_type_node;
1015 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1017 break;
1019 case COMPOUND_EXPR:
1020 /* Advance to the last statement. Set all container types to
1021 void. */
1022 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1024 TREE_SIDE_EFFECTS (*p) = 1;
1025 TREE_TYPE (*p) = void_type_node;
1027 break;
1029 case TRANSACTION_EXPR:
1030 TREE_SIDE_EFFECTS (*p) = 1;
1031 TREE_TYPE (*p) = void_type_node;
1032 p = &TRANSACTION_EXPR_BODY (*p);
1033 break;
1035 default:
1036 /* Assume that any tree upon which voidify_wrapper_expr is
1037 directly called is a wrapper, and that its body is op0. */
1038 if (p == &wrapper)
1040 TREE_SIDE_EFFECTS (*p) = 1;
1041 TREE_TYPE (*p) = void_type_node;
1042 p = &TREE_OPERAND (*p, 0);
1043 break;
1045 goto out;
1049 out:
1050 if (p == NULL || IS_EMPTY_STMT (*p))
1051 temp = NULL_TREE;
1052 else if (temp)
1054 /* The wrapper is on the RHS of an assignment that we're pushing
1055 down. */
1056 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1057 || TREE_CODE (temp) == MODIFY_EXPR);
1058 TREE_OPERAND (temp, 1) = *p;
1059 *p = temp;
1061 else
1063 temp = create_tmp_var (type, "retval");
1064 *p = build2 (INIT_EXPR, type, temp, *p);
1067 return temp;
1070 return NULL_TREE;
1073 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1074 a temporary through which they communicate. */
1076 static void
1077 build_stack_save_restore (gcall **save, gcall **restore)
1079 tree tmp_var;
1081 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1082 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1083 gimple_call_set_lhs (*save, tmp_var);
1085 *restore
1086 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1087 1, tmp_var);
1090 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1092 static enum gimplify_status
1093 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1095 tree bind_expr = *expr_p;
1096 bool old_keep_stack = gimplify_ctxp->keep_stack;
1097 bool old_save_stack = gimplify_ctxp->save_stack;
1098 tree t;
1099 gbind *bind_stmt;
1100 gimple_seq body, cleanup;
1101 gcall *stack_save;
1102 location_t start_locus = 0, end_locus = 0;
1103 tree ret_clauses = NULL;
/* If the BIND_EXPR is used for its value, voidify_wrapper_expr rewrites it
   so the value is produced into TEMP; TEMP is handed back to the caller at
   the bottom of this function. */
1105 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1107 /* Mark variables seen in this bind expr. */
1108 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1110 if (TREE_CODE (t) == VAR_DECL)
1112 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1114 /* Mark variable as local. */
1115 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1116 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1117 || splay_tree_lookup (ctx->variables,
1118 (splay_tree_key) t) == NULL))
1120 if (ctx->region_type == ORT_SIMD
1121 && TREE_ADDRESSABLE (t)
1122 && !TREE_STATIC (t))
1123 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1124 else
1125 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1128 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1130 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1131 cfun->has_local_explicit_reg_vars = true;
1134 /* Preliminarily mark non-addressed complex variables as eligible
1135 for promotion to gimple registers. We'll transform their uses
1136 as we find them. */
1137 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1138 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1139 && !TREE_THIS_VOLATILE (t)
1140 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1141 && !needs_to_live_in_memory (t))
1142 DECL_GIMPLE_REG_P (t) = 1;
1145 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1146 BIND_EXPR_BLOCK (bind_expr));
1147 gimple_push_bind_expr (bind_stmt);
/* Reset the per-bind stack flags; the body's gimplification sets them if
   it contains alloca (keep_stack) or VLAs (save_stack).  The saved outer
   values are merged back in below. */
1149 gimplify_ctxp->keep_stack = false;
1150 gimplify_ctxp->save_stack = false;
1152 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1153 body = NULL;
1154 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1155 gimple_bind_set_body (bind_stmt, body);
1157 /* Source location wise, the cleanup code (stack_restore and clobbers)
1158 belongs to the end of the block, so propagate what we have. The
1159 stack_save operation belongs to the beginning of block, which we can
1160 infer from the bind_expr directly if the block has no explicit
1161 assignment. */
1162 if (BIND_EXPR_BLOCK (bind_expr))
1164 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1165 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1167 if (start_locus == 0)
1168 start_locus = EXPR_LOCATION (bind_expr);
1170 cleanup = NULL;
1171 stack_save = NULL;
1173 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1174 the stack space allocated to the VLAs. */
1175 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1177 gcall *stack_restore;
1179 /* Save stack on entry and restore it on exit. Add a try_finally
1180 block to achieve this. */
1181 build_stack_save_restore (&stack_save, &stack_restore);
1183 gimple_set_location (stack_save, start_locus);
1184 gimple_set_location (stack_restore, end_locus);
1186 gimplify_seq_add_stmt (&cleanup, stack_restore);
1189 /* Add clobbers for all variables that go out of scope. */
1190 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1192 if (TREE_CODE (t) == VAR_DECL
1193 && !is_global_var (t)
1194 && DECL_CONTEXT (t) == current_function_decl
1195 && !DECL_HARD_REGISTER (t)
1196 && !TREE_THIS_VOLATILE (t)
1197 && !DECL_HAS_VALUE_EXPR_P (t)
1198 /* Only care for variables that have to be in memory. Others
1199 will be rewritten into SSA names, hence moved to the top-level. */
1200 && !is_gimple_reg (t)
1201 && flag_stack_reuse != SR_NONE)
1203 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1204 gimple *clobber_stmt;
1205 TREE_THIS_VOLATILE (clobber) = 1;
1206 clobber_stmt = gimple_build_assign (t, clobber);
1207 gimple_set_location (clobber_stmt, end_locus);
1208 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
/* Collect any OpenACC 'declare' return clauses registered for this
   variable; they are chained up and emitted as one statement below. */
1210 if (flag_openacc && oacc_declare_returns != NULL)
1212 tree *c = oacc_declare_returns->get (t);
1213 if (c != NULL)
1215 if (ret_clauses)
1216 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1218 ret_clauses = *c;
1220 oacc_declare_returns->remove (t);
1222 if (oacc_declare_returns->elements () == 0)
1224 delete oacc_declare_returns;
1225 oacc_declare_returns = NULL;
/* Emit a single OACC_DECLARE target statement for the collected clauses,
   placed ahead of the other cleanup statements. */
1232 if (ret_clauses)
1234 gomp_target *stmt;
1235 gimple_stmt_iterator si = gsi_start (cleanup);
1237 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1238 ret_clauses);
1239 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
/* Wrap the body in a GIMPLE_TRY_FINALLY so the cleanup (stack restore,
   clobbers, OpenACC declare returns) runs on every exit path; the stack
   save, if any, precedes the try block. */
1242 if (cleanup)
1244 gtry *gs;
1245 gimple_seq new_body;
1247 new_body = NULL;
1248 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1249 GIMPLE_TRY_FINALLY);
1251 if (stack_save)
1252 gimplify_seq_add_stmt (&new_body, stack_save);
1253 gimplify_seq_add_stmt (&new_body, gs);
1254 gimple_bind_set_body (bind_stmt, new_body);
1257 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1258 if (!gimplify_ctxp->keep_stack)
1259 gimplify_ctxp->keep_stack = old_keep_stack;
1260 gimplify_ctxp->save_stack = old_save_stack;
1262 gimple_pop_bind_expr ();
1264 gimplify_seq_add_stmt (pre_p, bind_stmt);
/* If voidify_wrapper_expr created a temporary, the BIND_EXPR's value now
   lives there; return it (GS_OK) so the caller re-gimplifies *EXPR_P. */
1266 if (temp)
1268 *expr_p = temp;
1269 return GS_OK;
1272 *expr_p = NULL_TREE;
1273 return GS_ALL_DONE;
1276 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1277 GIMPLE value, it is assigned to a new temporary and the statement is
1278 re-written to return the temporary.
1280 PRE_P points to the sequence where side effects that must happen before
1281 STMT should be stored. */
1283 static enum gimplify_status
1284 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1286 greturn *ret;
1287 tree ret_expr = TREE_OPERAND (stmt, 0);
1288 tree result_decl, result;
1290 if (ret_expr == error_mark_node)
1291 return GS_ERROR;
1293 /* Implicit _Cilk_sync must be inserted right before any return statement
1294 if there is a _Cilk_spawn in the function. If the user has provided a
1295 _Cilk_sync, the optimizer should remove this duplicate one. */
1296 if (fn_contains_cilk_spawn_p (cfun))
1298 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1299 gimplify_and_add (impl_sync, pre_p);
1302 if (!ret_expr
1303 || TREE_CODE (ret_expr) == RESULT_DECL
1304 || ret_expr == error_mark_node)
1306 greturn *ret = gimple_build_return (ret_expr);
1307 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1308 gimplify_seq_add_stmt (pre_p, ret);
1309 return GS_ALL_DONE;
1312 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1313 result_decl = NULL_TREE;
1314 else
1316 result_decl = TREE_OPERAND (ret_expr, 0);
1318 /* See through a return by reference. */
1319 if (TREE_CODE (result_decl) == INDIRECT_REF)
1320 result_decl = TREE_OPERAND (result_decl, 0);
1322 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1323 || TREE_CODE (ret_expr) == INIT_EXPR)
1324 && TREE_CODE (result_decl) == RESULT_DECL);
1327 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1328 Recall that aggregate_value_p is FALSE for any aggregate type that is
1329 returned in registers. If we're returning values in registers, then
1330 we don't want to extend the lifetime of the RESULT_DECL, particularly
1331 across another call. In addition, for those aggregates for which
1332 hard_function_value generates a PARALLEL, we'll die during normal
1333 expansion of structure assignments; there's special code in expand_return
1334 to handle this case that does not exist in expand_expr. */
1335 if (!result_decl)
1336 result = NULL_TREE;
1337 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1339 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1341 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1342 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1343 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1344 should be effectively allocated by the caller, i.e. all calls to
1345 this function must be subject to the Return Slot Optimization. */
1346 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1347 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1349 result = result_decl;
1351 else if (gimplify_ctxp->return_temp)
1352 result = gimplify_ctxp->return_temp;
1353 else
1355 result = create_tmp_reg (TREE_TYPE (result_decl));
1357 /* ??? With complex control flow (usually involving abnormal edges),
1358 we can wind up warning about an uninitialized value for this. Due
1359 to how this variable is constructed and initialized, this is never
1360 true. Give up and never warn. */
1361 TREE_NO_WARNING (result) = 1;
1363 gimplify_ctxp->return_temp = result;
1366 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1367 Then gimplify the whole thing. */
1368 if (result != result_decl)
1369 TREE_OPERAND (ret_expr, 0) = result;
1371 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1373 ret = gimple_build_return (result);
1374 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1375 gimplify_seq_add_stmt (pre_p, ret);
1377 return GS_ALL_DONE;
1380 /* Gimplify a variable-length array DECL. */
1382 static void
1383 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1385 /* This is a variable-sized decl. Simplify its size and mark it
1386 for deferred expansion. */
1387 tree t, addr, ptr_type;
1389 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1390 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1392 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1393 if (DECL_HAS_VALUE_EXPR_P (decl))
1394 return;
1396 /* All occurrences of this decl in final gimplified code will be
1397 replaced by indirection. Setting DECL_VALUE_EXPR does two
1398 things: First, it lets the rest of the gimplifier know what
1399 replacement to use. Second, it lets the debug info know
1400 where to find the value. */
1401 ptr_type = build_pointer_type (TREE_TYPE (decl));
1402 addr = create_tmp_var (ptr_type, get_name (decl));
1403 DECL_IGNORED_P (addr) = 0;
1404 t = build_fold_indirect_ref (addr);
1405 TREE_THIS_NOTRAP (t) = 1;
1406 SET_DECL_VALUE_EXPR (decl, t);
1407 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1409 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1410 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1411 size_int (DECL_ALIGN (decl)));
1412 /* The call has been built for a variable-sized object. */
1413 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1414 t = fold_convert (ptr_type, t);
1415 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1417 gimplify_and_add (t, seq_p);
1420 /* A helper function to be called via walk_tree. Mark all labels under *TP
1421 as being forced. To be called for DECL_INITIAL of static variables. */
1423 static tree
1424 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1426 if (TYPE_P (*tp))
1427 *walk_subtrees = 0;
1428 if (TREE_CODE (*tp) == LABEL_DECL)
1430 FORCED_LABEL (*tp) = 1;
1431 cfun->has_forced_label_in_static = 1;
1434 return NULL_TREE;
1437 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1438 and initialization explicit. */
1440 static enum gimplify_status
1441 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1443 tree stmt = *stmt_p;
1444 tree decl = DECL_EXPR_DECL (stmt);
1446 *stmt_p = NULL_TREE;
1448 if (TREE_TYPE (decl) == error_mark_node)
1449 return GS_ERROR;
1451 if ((TREE_CODE (decl) == TYPE_DECL
1452 || TREE_CODE (decl) == VAR_DECL)
1453 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1455 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1456 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1457 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1460 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1461 in case its size expressions contain problematic nodes like CALL_EXPR. */
1462 if (TREE_CODE (decl) == TYPE_DECL
1463 && DECL_ORIGINAL_TYPE (decl)
1464 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1466 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1467 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1468 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1471 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1473 tree init = DECL_INITIAL (decl);
1475 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1476 || (!TREE_STATIC (decl)
1477 && flag_stack_check == GENERIC_STACK_CHECK
1478 && compare_tree_int (DECL_SIZE_UNIT (decl),
1479 STACK_CHECK_MAX_VAR_SIZE) > 0))
1480 gimplify_vla_decl (decl, seq_p);
1482 /* Some front ends do not explicitly declare all anonymous
1483 artificial variables. We compensate here by declaring the
1484 variables, though it would be better if the front ends would
1485 explicitly declare them. */
1486 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1487 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1488 gimple_add_tmp_var (decl);
1490 if (init && init != error_mark_node)
1492 if (!TREE_STATIC (decl))
1494 DECL_INITIAL (decl) = NULL_TREE;
1495 init = build2 (INIT_EXPR, void_type_node, decl, init);
1496 gimplify_and_add (init, seq_p);
1497 ggc_free (init);
1499 else
1500 /* We must still examine initializers for static variables
1501 as they may contain a label address. */
1502 walk_tree (&init, force_labels_r, NULL, NULL);
1506 return GS_ALL_DONE;
1509 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1510 and replacing the LOOP_EXPR with goto, but if the loop contains an
1511 EXIT_EXPR, we need to append a label for it to jump to. */
1513 static enum gimplify_status
1514 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1516 tree saved_label = gimplify_ctxp->exit_label;
1517 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1519 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1521 gimplify_ctxp->exit_label = NULL_TREE;
1523 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1525 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1527 if (gimplify_ctxp->exit_label)
1528 gimplify_seq_add_stmt (pre_p,
1529 gimple_build_label (gimplify_ctxp->exit_label));
1531 gimplify_ctxp->exit_label = saved_label;
1533 *expr_p = NULL;
1534 return GS_ALL_DONE;
1537 /* Gimplify a statement list onto a sequence. These may be created either
1538 by an enlightened front-end, or by shortcut_cond_expr. */
1540 static enum gimplify_status
1541 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1543 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1545 tree_stmt_iterator i = tsi_start (*expr_p);
1547 while (!tsi_end_p (i))
1549 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1550 tsi_delink (&i);
1553 if (temp)
1555 *expr_p = temp;
1556 return GS_OK;
1559 return GS_ALL_DONE;
1562 /* Callback for walk_gimple_seq. */
1564 static tree
1565 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1566 struct walk_stmt_info *wi)
1568 gimple *stmt = gsi_stmt (*gsi_p);
1570 *handled_ops_p = true;
1571 switch (gimple_code (stmt))
1573 case GIMPLE_TRY:
1574 /* A compiler-generated cleanup or a user-written try block.
1575 If it's empty, don't dive into it--that would result in
1576 worse location info. */
1577 if (gimple_try_eval (stmt) == NULL)
1579 wi->info = stmt;
1580 return integer_zero_node;
1582 /* Fall through. */
1583 case GIMPLE_BIND:
1584 case GIMPLE_CATCH:
1585 case GIMPLE_EH_FILTER:
1586 case GIMPLE_TRANSACTION:
1587 /* Walk the sub-statements. */
1588 *handled_ops_p = false;
1589 break;
1590 default:
1591 /* Save the first "real" statement (not a decl/lexical scope/...). */
1592 wi->info = stmt;
1593 return integer_zero_node;
1595 return NULL_TREE;
1598 /* Possibly warn about unreachable statements between switch's controlling
1599 expression and the first case. SEQ is the body of a switch expression. */
1601 static void
1602 maybe_warn_switch_unreachable (gimple_seq seq)
1604 if (!warn_switch_unreachable
1605 /* This warning doesn't play well with Fortran when optimizations
1606 are on. */
1607 || lang_GNU_Fortran ()
1608 || seq == NULL)
1609 return;
1611 struct walk_stmt_info wi;
1612 memset (&wi, 0, sizeof (wi));
1613 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1614 gimple *stmt = (gimple *) wi.info;
1616 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1618 if (gimple_code (stmt) == GIMPLE_GOTO
1619 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1620 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1621 /* Don't warn for compiler-generated gotos. These occur
1622 in Duff's devices, for example. */;
1623 else
1624 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1625 "statement will never be executed");
1630 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1631 branch to. */
1633 static enum gimplify_status
1634 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1636 tree switch_expr = *expr_p;
1637 gimple_seq switch_body_seq = NULL;
1638 enum gimplify_status ret;
1639 tree index_type = TREE_TYPE (switch_expr);
1640 if (index_type == NULL_TREE)
1641 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
1643 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1644 fb_rvalue);
1645 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1646 return ret;
1648 if (SWITCH_BODY (switch_expr))
1650 vec<tree> labels;
1651 vec<tree> saved_labels;
1652 tree default_case = NULL_TREE;
1653 gswitch *switch_stmt;
1655 /* If someone can be bothered to fill in the labels, they can
1656 be bothered to null out the body too. */
1657 gcc_assert (!SWITCH_LABELS (switch_expr));
1659 /* Save old labels, get new ones from body, then restore the old
1660 labels. Save all the things from the switch body to append after. */
1661 saved_labels = gimplify_ctxp->case_labels;
1662 gimplify_ctxp->case_labels.create (8);
1664 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1666 maybe_warn_switch_unreachable (switch_body_seq);
1668 labels = gimplify_ctxp->case_labels;
1669 gimplify_ctxp->case_labels = saved_labels;
1671 preprocess_case_label_vec_for_gimple (labels, index_type,
1672 &default_case);
1674 if (!default_case)
1676 glabel *new_default;
1678 default_case
1679 = build_case_label (NULL_TREE, NULL_TREE,
1680 create_artificial_label (UNKNOWN_LOCATION));
1681 new_default = gimple_build_label (CASE_LABEL (default_case));
1682 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1685 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
1686 default_case, labels);
1687 gimplify_seq_add_stmt (pre_p, switch_stmt);
1688 gimplify_seq_add_seq (pre_p, switch_body_seq);
1689 labels.release ();
1691 else
1692 gcc_assert (SWITCH_LABELS (switch_expr));
1694 return GS_ALL_DONE;
1697 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
1699 static enum gimplify_status
1700 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1702 struct gimplify_ctx *ctxp;
1703 glabel *label_stmt;
1705 /* Invalid programs can play Duff's Device type games with, for example,
1706 #pragma omp parallel. At least in the C front end, we don't
1707 detect such invalid branches until after gimplification, in the
1708 diagnose_omp_blocks pass. */
1709 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1710 if (ctxp->case_labels.exists ())
1711 break;
1713 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
1714 ctxp->case_labels.safe_push (*expr_p);
1715 gimplify_seq_add_stmt (pre_p, label_stmt);
1717 return GS_ALL_DONE;
1720 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1721 if necessary. */
1723 tree
1724 build_and_jump (tree *label_p)
1726 if (label_p == NULL)
1727 /* If there's nowhere to jump, just fall through. */
1728 return NULL_TREE;
1730 if (*label_p == NULL_TREE)
1732 tree label = create_artificial_label (UNKNOWN_LOCATION);
1733 *label_p = label;
1736 return build1 (GOTO_EXPR, void_type_node, *label_p);
1739 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1740 This also involves building a label to jump to and communicating it to
1741 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1743 static enum gimplify_status
1744 gimplify_exit_expr (tree *expr_p)
1746 tree cond = TREE_OPERAND (*expr_p, 0);
1747 tree expr;
1749 expr = build_and_jump (&gimplify_ctxp->exit_label);
1750 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1751 *expr_p = expr;
1753 return GS_OK;
1756 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1757 different from its canonical type, wrap the whole thing inside a
1758 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1759 type.
1761 The canonical type of a COMPONENT_REF is the type of the field being
1762 referenced--unless the field is a bit-field which can be read directly
1763 in a smaller mode, in which case the canonical type is the
1764 sign-appropriate type corresponding to that mode. */
1766 static void
1767 canonicalize_component_ref (tree *expr_p)
1769 tree expr = *expr_p;
1770 tree type;
1772 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1774 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1775 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1776 else
1777 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1779 /* One could argue that all the stuff below is not necessary for
1780 the non-bitfield case and declare it a FE error if type
1781 adjustment would be needed. */
1782 if (TREE_TYPE (expr) != type)
1784 #ifdef ENABLE_TYPES_CHECKING
1785 tree old_type = TREE_TYPE (expr);
1786 #endif
1787 int type_quals;
1789 /* We need to preserve qualifiers and propagate them from
1790 operand 0. */
1791 type_quals = TYPE_QUALS (type)
1792 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1793 if (TYPE_QUALS (type) != type_quals)
1794 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1796 /* Set the type of the COMPONENT_REF to the underlying type. */
1797 TREE_TYPE (expr) = type;
1799 #ifdef ENABLE_TYPES_CHECKING
1800 /* It is now a FE error, if the conversion from the canonical
1801 type to the original expression type is not useless. */
1802 gcc_assert (useless_type_conversion_p (old_type, type));
1803 #endif
1807 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1808 to foo, embed that change in the ADDR_EXPR by converting
1809 T array[U];
1810 (T *)&array
1812 &array[L]
1813 where L is the lower bound. For simplicity, only do this for constant
1814 lower bound.
1815 The constraint is that the type of &array[L] is trivially convertible
1816 to T *. */
1818 static void
1819 canonicalize_addr_expr (tree *expr_p)
1821 tree expr = *expr_p;
1822 tree addr_expr = TREE_OPERAND (expr, 0);
1823 tree datype, ddatype, pddatype;
1825 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1826 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1827 || TREE_CODE (addr_expr) != ADDR_EXPR)
1828 return;
1830 /* The addr_expr type should be a pointer to an array. */
1831 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1832 if (TREE_CODE (datype) != ARRAY_TYPE)
1833 return;
1835 /* The pointer to element type shall be trivially convertible to
1836 the expression pointer type. */
1837 ddatype = TREE_TYPE (datype);
1838 pddatype = build_pointer_type (ddatype);
1839 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1840 pddatype))
1841 return;
1843 /* The lower bound and element sizes must be constant. */
1844 if (!TYPE_SIZE_UNIT (ddatype)
1845 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1846 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1847 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1848 return;
1850 /* All checks succeeded. Build a new node to merge the cast. */
1851 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1852 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1853 NULL_TREE, NULL_TREE);
1854 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1856 /* We can have stripped a required restrict qualifier above. */
1857 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1858 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1861 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1862 underneath as appropriate. */
1864 static enum gimplify_status
1865 gimplify_conversion (tree *expr_p)
1867 location_t loc = EXPR_LOCATION (*expr_p);
1868 gcc_assert (CONVERT_EXPR_P (*expr_p));
1870 /* Then strip away all but the outermost conversion. */
1871 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1873 /* And remove the outermost conversion if it's useless. */
1874 if (tree_ssa_useless_type_conversion (*expr_p))
1875 *expr_p = TREE_OPERAND (*expr_p, 0);
1877 /* If we still have a conversion at the toplevel,
1878 then canonicalize some constructs. */
1879 if (CONVERT_EXPR_P (*expr_p))
1881 tree sub = TREE_OPERAND (*expr_p, 0);
1883 /* If a NOP conversion is changing the type of a COMPONENT_REF
1884 expression, then canonicalize its type now in order to expose more
1885 redundant conversions. */
1886 if (TREE_CODE (sub) == COMPONENT_REF)
1887 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1889 /* If a NOP conversion is changing a pointer to array of foo
1890 to a pointer to foo, embed that change in the ADDR_EXPR. */
1891 else if (TREE_CODE (sub) == ADDR_EXPR)
1892 canonicalize_addr_expr (expr_p);
1895 /* If we have a conversion to a non-register type force the
1896 use of a VIEW_CONVERT_EXPR instead. */
1897 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1898 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1899 TREE_OPERAND (*expr_p, 0));
1901 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
1902 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
1903 TREE_SET_CODE (*expr_p, NOP_EXPR);
1905 return GS_OK;
1908 /* Nonlocal VLAs seen in the current function. */
1909 static hash_set<tree> *nonlocal_vlas;
1911 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
/* The copies are chained through DECL_CHAIN (see gimplify_var_or_parm_decl).
   NOTE(review): presumably the chain is later attached to the function's
   scope for debug output -- the consumer is outside this chunk; confirm.  */
1912 static tree nonlocal_vla_vars;
1914 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
1915 DECL_VALUE_EXPR, and it's worth re-examining things. */
1917 static enum gimplify_status
1918 gimplify_var_or_parm_decl (tree *expr_p)
1920 tree decl = *expr_p;
1922 /* ??? If this is a local variable, and it has not been seen in any
1923 outer BIND_EXPR, then it's probably the result of a duplicate
1924 declaration, for which we've already issued an error. It would
1925 be really nice if the front end wouldn't leak these at all.
1926 Currently the only known culprit is C++ destructors, as seen
1927 in g++.old-deja/g++.jason/binding.C. */
1928 if (TREE_CODE (decl) == VAR_DECL
1929 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1930 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1931 && decl_function_context (decl) == current_function_decl)
1933 gcc_assert (seen_error ());
1934 return GS_ERROR;
1937 /* When within an OMP context, notice uses of variables. */
1938 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1939 return GS_ALL_DONE;
1941 /* If the decl is an alias for another expression, substitute it now. */
1942 if (DECL_HAS_VALUE_EXPR_P (decl))
1944 tree value_expr = DECL_VALUE_EXPR (decl);
1946 /* For referenced nonlocal VLAs add a decl for debugging purposes
1947 to the current function. */
1948 if (TREE_CODE (decl) == VAR_DECL
1949 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1950 && nonlocal_vlas != NULL
1951 && TREE_CODE (value_expr) == INDIRECT_REF
1952 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1953 && decl_function_context (decl) != current_function_decl)
/* Skip out of workshare/simd/acc contexts: the copy is made only when the
   reference is not enclosed in any other kind of OMP region. */
1955 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1956 while (ctx
1957 && (ctx->region_type == ORT_WORKSHARE
1958 || ctx->region_type == ORT_SIMD
1959 || ctx->region_type == ORT_ACC))
1960 ctx = ctx->outer_context;
/* NOTE(review): hash_set::add appears to return true when DECL was already
   present, so the copy is created at most once per VLA -- confirm against
   hash-set.h.  */
1961 if (!ctx && !nonlocal_vlas->add (decl))
1963 tree copy = copy_node (decl);
1965 lang_hooks.dup_lang_specific_decl (copy);
1966 SET_DECL_RTL (copy, 0);
1967 TREE_USED (copy) = 1;
/* Chain the debug copy onto nonlocal_vla_vars and give it the same
   (unshared) value expression as the original. */
1968 DECL_CHAIN (copy) = nonlocal_vla_vars;
1969 nonlocal_vla_vars = copy;
1970 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1971 DECL_HAS_VALUE_EXPR_P (copy) = 1;
/* Replace the decl by an unshared copy of its value expression and ask the
   caller to re-gimplify the result. */
1975 *expr_p = unshare_expr (value_expr);
1976 return GS_OK;
1979 return GS_ALL_DONE;
1982 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
1984 static void
1985 recalculate_side_effects (tree t)
1987 enum tree_code code = TREE_CODE (t);
1988 int len = TREE_OPERAND_LENGTH (t);
1989 int i;
1991 switch (TREE_CODE_CLASS (code))
1993 case tcc_expression:
1994 switch (code)
1996 case INIT_EXPR:
1997 case MODIFY_EXPR:
1998 case VA_ARG_EXPR:
1999 case PREDECREMENT_EXPR:
2000 case PREINCREMENT_EXPR:
2001 case POSTDECREMENT_EXPR:
2002 case POSTINCREMENT_EXPR:
2003 /* All of these have side-effects, no matter what their
2004 operands are. */
2005 return;
2007 default:
2008 break;
2010 /* Fall through. */
2012 case tcc_comparison: /* a comparison expression */
2013 case tcc_unary: /* a unary arithmetic expression */
2014 case tcc_binary: /* a binary arithmetic expression */
2015 case tcc_reference: /* a reference */
2016 case tcc_vl_exp: /* a function call */
2017 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2018 for (i = 0; i < len; ++i)
2020 tree op = TREE_OPERAND (t, i);
2021 if (op && TREE_SIDE_EFFECTS (op))
2022 TREE_SIDE_EFFECTS (t) = 1;
2024 break;
2026 case tcc_constant:
2027 /* No side-effects. */
2028 return;
2030 default:
2031 gcc_unreachable ();
2035 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2036 node *EXPR_P.
2038 compound_lval
2039 : min_lval '[' val ']'
2040 | min_lval '.' ID
2041 | compound_lval '[' val ']'
2042 | compound_lval '.' ID
2044 This is not part of the original SIMPLE definition, which separates
2045 array and member references, but it seems reasonable to handle them
2046 together. Also, this way we don't run into problems with union
2047 aliasing; gcc requires that for accesses through a union to alias, the
2048 union reference must be explicit, which was not always the case when we
2049 were splitting up array and member refs.
2051 PRE_P points to the sequence where side effects that must happen before
2052 *EXPR_P should be stored.
2054 POST_P points to the sequence where side effects that must happen after
2055 *EXPR_P should be stored. */
2057 static enum gimplify_status
2058 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2059 fallback_t fallback)
2061 tree *p;
2062 enum gimplify_status ret = GS_ALL_DONE, tret;
2063 int i;
2064 location_t loc = EXPR_LOCATION (*expr_p);
2065 tree expr = *expr_p;
2067 /* Create a stack of the subexpressions so later we can walk them in
2068 order from inner to outer. */
2069 auto_vec<tree, 10> expr_stack;
2071 /* We can handle anything that get_inner_reference can deal with. */
2072 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2074 restart:
2075 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2076 if (TREE_CODE (*p) == INDIRECT_REF)
2077 *p = fold_indirect_ref_loc (loc, *p);
2079 if (handled_component_p (*p))
2081 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2082 additional COMPONENT_REFs. */
2083 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
2084 && gimplify_var_or_parm_decl (p) == GS_OK)
2085 goto restart;
2086 else
2087 break;
2089 expr_stack.safe_push (*p);
2092 gcc_assert (expr_stack.length ());
2094 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2095 walked through and P points to the innermost expression.
2097 Java requires that we elaborated nodes in source order. That
2098 means we must gimplify the inner expression followed by each of
2099 the indices, in order. But we can't gimplify the inner
2100 expression until we deal with any variable bounds, sizes, or
2101 positions in order to deal with PLACEHOLDER_EXPRs.
2103 So we do this in three steps. First we deal with the annotations
2104 for any variables in the components, then we gimplify the base,
2105 then we gimplify any indices, from left to right. */
2106 for (i = expr_stack.length () - 1; i >= 0; i--)
2108 tree t = expr_stack[i];
2110 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2112 /* Gimplify the low bound and element type size and put them into
2113 the ARRAY_REF. If these values are set, they have already been
2114 gimplified. */
2115 if (TREE_OPERAND (t, 2) == NULL_TREE)
2117 tree low = unshare_expr (array_ref_low_bound (t));
2118 if (!is_gimple_min_invariant (low))
2120 TREE_OPERAND (t, 2) = low;
2121 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2122 post_p, is_gimple_reg,
2123 fb_rvalue);
2124 ret = MIN (ret, tret);
2127 else
2129 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2130 is_gimple_reg, fb_rvalue);
2131 ret = MIN (ret, tret);
2134 if (TREE_OPERAND (t, 3) == NULL_TREE)
2136 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2137 tree elmt_size = unshare_expr (array_ref_element_size (t));
2138 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2140 /* Divide the element size by the alignment of the element
2141 type (above). */
2142 elmt_size
2143 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2145 if (!is_gimple_min_invariant (elmt_size))
2147 TREE_OPERAND (t, 3) = elmt_size;
2148 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2149 post_p, is_gimple_reg,
2150 fb_rvalue);
2151 ret = MIN (ret, tret);
2154 else
2156 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2157 is_gimple_reg, fb_rvalue);
2158 ret = MIN (ret, tret);
2161 else if (TREE_CODE (t) == COMPONENT_REF)
2163 /* Set the field offset into T and gimplify it. */
2164 if (TREE_OPERAND (t, 2) == NULL_TREE)
2166 tree offset = unshare_expr (component_ref_field_offset (t));
2167 tree field = TREE_OPERAND (t, 1);
2168 tree factor
2169 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2171 /* Divide the offset by its alignment. */
2172 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2174 if (!is_gimple_min_invariant (offset))
2176 TREE_OPERAND (t, 2) = offset;
2177 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2178 post_p, is_gimple_reg,
2179 fb_rvalue);
2180 ret = MIN (ret, tret);
2183 else
2185 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2186 is_gimple_reg, fb_rvalue);
2187 ret = MIN (ret, tret);
2192 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2193 so as to match the min_lval predicate. Failure to do so may result
2194 in the creation of large aggregate temporaries. */
2195 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2196 fallback | fb_lvalue);
2197 ret = MIN (ret, tret);
2199 /* And finally, the indices and operands of ARRAY_REF. During this
2200 loop we also remove any useless conversions. */
2201 for (; expr_stack.length () > 0; )
2203 tree t = expr_stack.pop ();
2205 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2207 /* Gimplify the dimension. */
2208 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2210 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2211 is_gimple_val, fb_rvalue);
2212 ret = MIN (ret, tret);
2216 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2218 /* The innermost expression P may have originally had
2219 TREE_SIDE_EFFECTS set which would have caused all the outer
2220 expressions in *EXPR_P leading to P to also have had
2221 TREE_SIDE_EFFECTS set. */
2222 recalculate_side_effects (t);
2225 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2226 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2228 canonicalize_component_ref (expr_p);
2231 expr_stack.release ();
2233 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2235 return ret;
2238 /* Gimplify the self modifying expression pointed to by EXPR_P
2239 (++, --, +=, -=).
2241 PRE_P points to the list where side effects that must happen before
2242 *EXPR_P should be stored.
2244 POST_P points to the list where side effects that must happen after
2245 *EXPR_P should be stored.
2247 WANT_VALUE is nonzero iff we want to use the value of this expression
2248 in another expression.
2250 ARITH_TYPE is the type the computation should be performed in. */
2252 enum gimplify_status
2253 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2254 bool want_value, tree arith_type)
2256 enum tree_code code;
2257 tree lhs, lvalue, rhs, t1;
2258 gimple_seq post = NULL, *orig_post_p = post_p;
2259 bool postfix;
2260 enum tree_code arith_code;
2261 enum gimplify_status ret;
2262 location_t loc = EXPR_LOCATION (*expr_p);
2264 code = TREE_CODE (*expr_p);
2266 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2267 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2269 /* Prefix or postfix? */
2270 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2271 /* Faster to treat as prefix if result is not used. */
2272 postfix = want_value;
2273 else
2274 postfix = false;
2276 /* For postfix, make sure the inner expression's post side effects
2277 are executed after side effects from this expression. */
2278 if (postfix)
2279 post_p = &post;
2281 /* Add or subtract? */
2282 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2283 arith_code = PLUS_EXPR;
2284 else
2285 arith_code = MINUS_EXPR;
2287 /* Gimplify the LHS into a GIMPLE lvalue. */
2288 lvalue = TREE_OPERAND (*expr_p, 0);
2289 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2290 if (ret == GS_ERROR)
2291 return ret;
2293 /* Extract the operands to the arithmetic operation. */
2294 lhs = lvalue;
2295 rhs = TREE_OPERAND (*expr_p, 1);
2297 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2298 that as the result value and in the postqueue operation. */
2299 if (postfix)
2301 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2302 if (ret == GS_ERROR)
2303 return ret;
2305 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2308 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2309 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2311 rhs = convert_to_ptrofftype_loc (loc, rhs);
2312 if (arith_code == MINUS_EXPR)
2313 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2314 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2316 else
2317 t1 = fold_convert (TREE_TYPE (*expr_p),
2318 fold_build2 (arith_code, arith_type,
2319 fold_convert (arith_type, lhs),
2320 fold_convert (arith_type, rhs)));
2322 if (postfix)
2324 gimplify_assign (lvalue, t1, pre_p);
2325 gimplify_seq_add_seq (orig_post_p, post);
2326 *expr_p = lhs;
2327 return GS_ALL_DONE;
2329 else
2331 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2332 return GS_OK;
2336 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2338 static void
2339 maybe_with_size_expr (tree *expr_p)
2341 tree expr = *expr_p;
2342 tree type = TREE_TYPE (expr);
2343 tree size;
2345 /* If we've already wrapped this or the type is error_mark_node, we can't do
2346 anything. */
2347 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2348 || type == error_mark_node)
2349 return;
2351 /* If the size isn't known or is a constant, we have nothing to do. */
2352 size = TYPE_SIZE_UNIT (type);
2353 if (!size || TREE_CODE (size) == INTEGER_CST)
2354 return;
2356 /* Otherwise, make a WITH_SIZE_EXPR. */
2357 size = unshare_expr (size);
2358 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2359 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2362 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2363 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2364 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
2365 gimplified to an SSA name. */
2367 enum gimplify_status
2368 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
2369 bool allow_ssa)
2371 bool (*test) (tree);
2372 fallback_t fb;
2374 /* In general, we allow lvalues for function arguments to avoid
2375 extra overhead of copying large aggregates out of even larger
2376 aggregates into temporaries only to copy the temporaries to
2377 the argument list. Make optimizers happy by pulling out to
2378 temporaries those types that fit in registers. */
2379 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2380 test = is_gimple_val, fb = fb_rvalue;
2381 else
2383 test = is_gimple_lvalue, fb = fb_either;
2384 /* Also strip a TARGET_EXPR that would force an extra copy. */
2385 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2387 tree init = TARGET_EXPR_INITIAL (*arg_p);
2388 if (init
2389 && !VOID_TYPE_P (TREE_TYPE (init)))
2390 *arg_p = init;
2394 /* If this is a variable sized type, we must remember the size. */
2395 maybe_with_size_expr (arg_p);
2397 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2398 /* Make sure arguments have the same location as the function call
2399 itself. */
2400 protected_set_expr_location (*arg_p, call_location);
2402 /* There is a sequence point before a function call. Side effects in
2403 the argument list must occur before the actual call. So, when
2404 gimplifying arguments, force gimplify_expr to use an internal
2405 post queue which is then appended to the end of PRE_P. */
2406 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
2409 /* Don't fold inside offloading or taskreg regions: it can break code by
2410 adding decl references that weren't in the source. We'll do it during
2411 omplower pass instead. */
2413 static bool
2414 maybe_fold_stmt (gimple_stmt_iterator *gsi)
2416 struct gimplify_omp_ctx *ctx;
2417 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2418 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
2419 return false;
2420 return fold_stmt (gsi);
2423 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2424 WANT_VALUE is true if the result of the call is desired. */
/* NOTE(review): every line below starts with a residual line number from the
   blob viewer this chunk was extracted from; the extraction also dropped
   blank and brace-only lines, so indentation, not braces, marks structure.
   Returns GS_OK / GS_ALL_DONE / GS_ERROR per the gimplifier protocol.  */
2426 static enum gimplify_status
2427 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2429 tree fndecl, parms, p, fnptrtype;
2430 enum gimplify_status ret;
2431 int i, nargs;
2432 gcall *call;
2433 bool builtin_va_start_p = false;
2434 location_t loc = EXPR_LOCATION (*expr_p);
2436 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2438 /* For reliable diagnostics during inlining, it is necessary that
2439 every call_expr be annotated with file and line. */
2440 if (! EXPR_HAS_LOCATION (*expr_p))
2441 SET_EXPR_LOCATION (*expr_p, input_location);
2443 /* Gimplify internal functions created in the FEs.  An internal call has
   a null CALL_EXPR_FN; it is always emitted as a statement, so a caller
   wanting its value leaves the tree alone (GS_ALL_DONE).  */
2444 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
2446 if (want_value)
2447 return GS_ALL_DONE;
2449 nargs = call_expr_nargs (*expr_p);
2450 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
2451 auto_vec<tree> vargs (nargs);
2453 for (i = 0; i < nargs; i++)
2455 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2456 EXPR_LOCATION (*expr_p));
2457 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
2459 gimple *call = gimple_build_call_internal_vec (ifn, vargs);
2460 gimplify_seq_add_stmt (pre_p, call);
2461 return GS_ALL_DONE;
2464 /* This may be a call to a builtin function.
2466 Builtin function calls may be transformed into different
2467 (and more efficient) builtin function calls under certain
2468 circumstances. Unfortunately, gimplification can muck things
2469 up enough that the builtin expanders are not aware that certain
2470 transformations are still valid.
2472 So we attempt transformation/gimplification of the call before
2473 we gimplify the CALL_EXPR. At this time we do not manage to
2474 transform all calls in the same manner as the expanders do, but
2475 we do transform most of them. */
2476 fndecl = get_callee_fndecl (*expr_p);
2477 if (fndecl
2478 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2479 switch (DECL_FUNCTION_CODE (fndecl))
2481 case BUILT_IN_ALLOCA:
2482 case BUILT_IN_ALLOCA_WITH_ALIGN:
2483 /* If the call has been built for a variable-sized object, then we
2484 want to restore the stack level when the enclosing BIND_EXPR is
2485 exited to reclaim the allocated space; otherwise, we precisely
2486 need to do the opposite and preserve the latest stack level. */
2487 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
2488 gimplify_ctxp->save_stack = true;
2489 else
2490 gimplify_ctxp->keep_stack = true;
2491 break;
2493 case BUILT_IN_VA_START:
2495 builtin_va_start_p = TRUE;
/* Both error paths below replace the call by an empty statement and
   return early; error () already issued the diagnostic in the first.  */
2496 if (call_expr_nargs (*expr_p) < 2)
2498 error ("too few arguments to function %<va_start%>");
2499 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2500 return GS_OK;
2503 if (fold_builtin_next_arg (*expr_p, true))
2505 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2506 return GS_OK;
2508 break;
2511 default:
/* Pre-gimplification folding attempt for any other builtin.  */
2514 if (fndecl && DECL_BUILT_IN (fndecl))
2516 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2517 if (new_tree && new_tree != *expr_p)
2519 /* There was a transformation of this call which computes the
2520 same value, but in a more efficient way. Return and try
2521 again. */
2522 *expr_p = new_tree;
2523 return GS_OK;
2527 /* Remember the original function pointer type. */
2528 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2530 /* There is a sequence point before the call, so any side effects in
2531 the calling expression must occur before the actual call. Force
2532 gimplify_expr to use an internal post queue. */
2533 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2534 is_gimple_call_addr, fb_rvalue);
2536 nargs = call_expr_nargs (*expr_p);
2538 /* Get argument types for verification. */
2539 fndecl = get_callee_fndecl (*expr_p);
2540 parms = NULL_TREE;
2541 if (fndecl)
2542 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2543 else
2544 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
2546 if (fndecl && DECL_ARGUMENTS (fndecl))
2547 p = DECL_ARGUMENTS (fndecl);
2548 else if (parms)
2549 p = parms;
2550 else
2551 p = NULL_TREE;
/* NOTE(review): this loop merely advances P in lockstep with the actual
   arguments to detect running past the named parameters; its body is
   empty in the upstream source (the `;' line was dropped by the blob
   extraction) -- confirm against the original file.  */
2552 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2555 /* If the last argument is __builtin_va_arg_pack () and it is not
2556 passed as a named argument, decrease the number of CALL_EXPR
2557 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2558 if (!p
2559 && i < nargs
2560 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2562 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2563 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2565 if (last_arg_fndecl
2566 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2567 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2568 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2570 tree call = *expr_p;
/* Rebuild the CALL_EXPR without the trailing va_arg_pack argument.  */
2572 --nargs;
2573 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2574 CALL_EXPR_FN (call),
2575 nargs, CALL_EXPR_ARGP (call));
2577 /* Copy all CALL_EXPR flags, location and block, except
2578 CALL_EXPR_VA_ARG_PACK flag. */
2579 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2580 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2581 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2582 = CALL_EXPR_RETURN_SLOT_OPT (call);
2583 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2584 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2586 /* Set CALL_EXPR_VA_ARG_PACK. */
2587 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2591 /* If the call returns twice then after building the CFG the call
2592 argument computations will no longer dominate the call because
2593 we add an abnormal incoming edge to the call. So do not use SSA
2594 vars there. */
2595 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
2597 /* Gimplify the function arguments. */
2598 if (nargs > 0)
2600 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2601 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2602 PUSH_ARGS_REVERSED ? i-- : i++)
2604 enum gimplify_status t;
2606 /* Avoid gimplifying the second argument to va_start, which needs to
2607 be the plain PARM_DECL. */
2608 if ((i != 1) || !builtin_va_start_p)
2610 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2611 EXPR_LOCATION (*expr_p), ! returns_twice);
/* An argument failure poisons the whole call but does not abort the
   loop: remaining arguments are still gimplified.  */
2613 if (t == GS_ERROR)
2614 ret = GS_ERROR;
2619 /* Gimplify the static chain. */
2620 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
2622 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
2623 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
2624 else
2626 enum gimplify_status t;
2627 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
2628 EXPR_LOCATION (*expr_p), ! returns_twice);
2629 if (t == GS_ERROR)
2630 ret = GS_ERROR;
2634 /* Verify the function result. */
2635 if (want_value && fndecl
2636 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2638 error_at (loc, "using result of function returning %<void%>");
2639 ret = GS_ERROR;
2642 /* Try this again in case gimplification exposed something. */
2643 if (ret != GS_ERROR)
2645 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2647 if (new_tree && new_tree != *expr_p)
2649 /* There was a transformation of this call which computes the
2650 same value, but in a more efficient way. Return and try
2651 again. */
2652 *expr_p = new_tree;
2653 return GS_OK;
2656 else
2658 *expr_p = error_mark_node;
2659 return GS_ERROR;
2662 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2663 decl. This allows us to eliminate redundant or useless
2664 calls to "const" functions. */
2665 if (TREE_CODE (*expr_p) == CALL_EXPR)
2667 int flags = call_expr_flags (*expr_p);
2668 if (flags & (ECF_CONST | ECF_PURE)
2669 /* An infinite loop is considered a side effect. */
2670 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2671 TREE_SIDE_EFFECTS (*expr_p) = 0;
2674 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2675 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2676 form and delegate the creation of a GIMPLE_CALL to
2677 gimplify_modify_expr. This is always possible because when
2678 WANT_VALUE is true, the caller wants the result of this call into
2679 a temporary, which means that we will emit an INIT_EXPR in
2680 internal_get_tmp_var which will then be handled by
2681 gimplify_modify_expr. */
2682 if (!want_value)
2684 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2685 have to do is replicate it as a GIMPLE_CALL tuple. */
2686 gimple_stmt_iterator gsi;
2687 call = gimple_build_call_from_tree (*expr_p)
2688 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2689 notice_special_calls (call);
2690 gimplify_seq_add_stmt (pre_p, call);
2691 gsi = gsi_last (*pre_p);
2692 maybe_fold_stmt (&gsi);
2693 *expr_p = NULL_TREE;
2695 else
2696 /* Remember the original function type. */
2697 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2698 CALL_EXPR_FN (*expr_p));
2700 return ret;
2703 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2704 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2706 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2707 condition is true or false, respectively. If null, we should generate
2708 our own to skip over the evaluation of this specific expression.
2710 LOCUS is the source location of the COND_EXPR.
2712 This function is the tree equivalent of do_jump.
2714 shortcut_cond_r should only be called by shortcut_cond_expr. */
/* NOTE(review): line-number prefixes and missing braces are residue of the
   blob-viewer extraction; indentation and the numbering gaps mark the
   dropped brace/blank lines.  */
2716 static tree
2717 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2718 location_t locus)
2720 tree local_label = NULL_TREE;
2721 tree t, expr = NULL;
2723 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2724 retain the shortcut semantics. Just insert the gotos here;
2725 shortcut_cond_expr will append the real blocks later. */
2726 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2728 location_t new_locus;
2730 /* Turn if (a && b) into
2732 if (a); else goto no;
2733 if (b) goto yes; else goto no;
2734 (no:) */
/* A missing false label means "fall past this whole predicate": make a
   local label to emit at the end of this function.  */
2736 if (false_label_p == NULL)
2737 false_label_p = &local_label;
2739 /* Keep the original source location on the first 'if'. */
2740 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2741 append_to_statement_list (t, &expr);
2743 /* Set the source location of the && on the second 'if'. */
2744 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2745 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2746 new_locus);
2747 append_to_statement_list (t, &expr);
2749 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2751 location_t new_locus;
2753 /* Turn if (a || b) into
2755 if (a) goto yes;
2756 if (b) goto yes; else goto no;
2757 (yes:) */
/* Mirror image of the && case: a missing true label becomes local.  */
2759 if (true_label_p == NULL)
2760 true_label_p = &local_label;
2762 /* Keep the original source location on the first 'if'. */
2763 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2764 append_to_statement_list (t, &expr);
2766 /* Set the source location of the || on the second 'if'. */
2767 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2768 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2769 new_locus);
2770 append_to_statement_list (t, &expr);
2772 else if (TREE_CODE (pred) == COND_EXPR
2773 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2774 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2776 location_t new_locus;
2778 /* As long as we're messing with gotos, turn if (a ? b : c) into
2779 if (a)
2780 if (b) goto yes; else goto no;
2781 else
2782 if (c) goto yes; else goto no;
2784 Don't do this if one of the arms has void type, which can happen
2785 in C++ when the arm is throw. */
2787 /* Keep the original source location on the first 'if'. Set the source
2788 location of the ? on the second 'if'. */
2789 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2790 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2791 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2792 false_label_p, locus),
2793 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2794 false_label_p, new_locus));
2796 else
/* Base case: a simple predicate becomes one COND_EXPR whose arms are
   jumps to (possibly newly created) labels.  */
2798 expr = build3 (COND_EXPR, void_type_node, pred,
2799 build_and_jump (true_label_p),
2800 build_and_jump (false_label_p));
2801 SET_EXPR_LOCATION (expr, locus);
/* Emit the local fall-through label (created above) after everything.  */
2804 if (local_label)
2806 t = build1 (LABEL_EXPR, void_type_node, local_label);
2807 append_to_statement_list (t, &expr);
2810 return expr;
2813 /* Given a conditional expression EXPR with short-circuit boolean
2814 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2815 predicate apart into the equivalent sequence of conditionals. */
/* NOTE(review): line-number prefixes and missing braces are residue of the
   blob-viewer extraction; structure follows the indentation.  */
2817 static tree
2818 shortcut_cond_expr (tree expr)
2820 tree pred = TREE_OPERAND (expr, 0);
2821 tree then_ = TREE_OPERAND (expr, 1);
2822 tree else_ = TREE_OPERAND (expr, 2);
2823 tree true_label, false_label, end_label, t;
2824 tree *true_label_p;
2825 tree *false_label_p;
2826 bool emit_end, emit_false, jump_over_else;
2827 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2828 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2830 /* First do simple transformations. */
2831 if (!else_se)
2833 /* If there is no 'else', turn
2834 if (a && b) then c
2835 into
2836 if (a) if (b) then c. */
2837 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2839 /* Keep the original source location on the first 'if'. */
2840 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2841 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2842 /* Set the source location of the && on the second 'if'. */
2843 if (EXPR_HAS_LOCATION (pred))
2844 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
/* Recurse on the inner 'if (b) c' first, then wrap it in 'if (a)'.  */
2845 then_ = shortcut_cond_expr (expr);
2846 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2847 pred = TREE_OPERAND (pred, 0);
2848 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2849 SET_EXPR_LOCATION (expr, locus);
2853 if (!then_se)
2855 /* If there is no 'then', turn
2856 if (a || b); else d
2857 into
2858 if (a); else if (b); else d. */
2859 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2861 /* Keep the original source location on the first 'if'. */
2862 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2863 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2864 /* Set the source location of the || on the second 'if'. */
2865 if (EXPR_HAS_LOCATION (pred))
2866 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2867 else_ = shortcut_cond_expr (expr);
2868 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2869 pred = TREE_OPERAND (pred, 0);
2870 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2871 SET_EXPR_LOCATION (expr, locus);
2875 /* If we're done, great. */
2876 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2877 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2878 return expr;
2880 /* Otherwise we need to mess with gotos. Change
2881 if (a) c; else d;
2883 if (a); else goto no;
2884 c; goto end;
2885 no: d; end:
2886 and recursively gimplify the condition. */
2888 true_label = false_label = end_label = NULL_TREE;
2890 /* If our arms just jump somewhere, hijack those labels so we don't
2891 generate jumps to jumps. */
2893 if (then_
2894 && TREE_CODE (then_) == GOTO_EXPR
2895 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2897 true_label = GOTO_DESTINATION (then_);
2898 then_ = NULL;
2899 then_se = false;
2902 if (else_
2903 && TREE_CODE (else_) == GOTO_EXPR
2904 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2906 false_label = GOTO_DESTINATION (else_);
2907 else_ = NULL;
2908 else_se = false;
2911 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2912 if (true_label)
2913 true_label_p = &true_label;
2914 else
2915 true_label_p = NULL;
2917 /* The 'else' branch also needs a label if it contains interesting code. */
2918 if (false_label || else_se)
2919 false_label_p = &false_label;
2920 else
2921 false_label_p = NULL;
2923 /* If there was nothing else in our arms, just forward the label(s). */
2924 if (!then_se && !else_se)
2925 return shortcut_cond_r (pred, true_label_p, false_label_p,
2926 EXPR_LOC_OR_LOC (expr, input_location));
2928 /* If our last subexpression already has a terminal label, reuse it. */
2929 if (else_se)
2930 t = expr_last (else_);
2931 else if (then_se)
2932 t = expr_last (then_);
2933 else
2934 t = NULL;
2935 if (t && TREE_CODE (t) == LABEL_EXPR)
2936 end_label = LABEL_EXPR_LABEL (t);
2938 /* If we don't care about jumping to the 'else' branch, jump to the end
2939 if the condition is false. */
2940 if (!false_label_p)
2941 false_label_p = &end_label;
2943 /* We only want to emit these labels if we aren't hijacking them. */
2944 emit_end = (end_label == NULL_TREE);
2945 emit_false = (false_label == NULL_TREE);
2947 /* We only emit the jump over the else clause if we have to--if the
2948 then clause may fall through. Otherwise we can wind up with a
2949 useless jump and a useless label at the end of gimplified code,
2950 which will cause us to think that this conditional as a whole
2951 falls through even if it doesn't. If we then inline a function
2952 which ends with such a condition, that can cause us to issue an
2953 inappropriate warning about control reaching the end of a
2954 non-void function. */
2955 jump_over_else = block_may_fallthru (then_);
/* shortcut_cond_r may create the end/false labels via build_and_jump,
   which is why the emit_* decisions were latched above, before this call.  */
2957 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2958 EXPR_LOC_OR_LOC (expr, input_location));
/* Assemble: predicate jumps, then-arm, [goto end, false:, else-arm], end:.  */
2960 expr = NULL;
2961 append_to_statement_list (pred, &expr);
2963 append_to_statement_list (then_, &expr);
2964 if (else_se)
2966 if (jump_over_else)
2968 tree last = expr_last (expr);
2969 t = build_and_jump (&end_label);
2970 if (EXPR_HAS_LOCATION (last))
2971 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2972 append_to_statement_list (t, &expr);
2974 if (emit_false)
2976 t = build1 (LABEL_EXPR, void_type_node, false_label);
2977 append_to_statement_list (t, &expr);
2979 append_to_statement_list (else_, &expr);
2981 if (emit_end && end_label)
2983 t = build1 (LABEL_EXPR, void_type_node, end_label);
2984 append_to_statement_list (t, &expr);
2987 return expr;
2990 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
/* NOTE(review): line-number prefixes and missing braces are residue of the
   blob-viewer extraction.  Returns EXPR itself (retyped and with boolified
   operands) or a conversion of EXPR to boolean_type_node.  */
2992 tree
2993 gimple_boolify (tree expr)
2995 tree type = TREE_TYPE (expr);
2996 location_t loc = EXPR_LOCATION (expr);
/* Special-case `__builtin_expect (x, y) != 0' so the truth value inside
   the __builtin_expect argument is boolified too.  */
2998 if (TREE_CODE (expr) == NE_EXPR
2999 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3000 && integer_zerop (TREE_OPERAND (expr, 1)))
3002 tree call = TREE_OPERAND (expr, 0);
3003 tree fn = get_callee_fndecl (call);
3005 /* For __builtin_expect ((long) (x), y) recurse into x as well
3006 if x is truth_value_p. */
3007 if (fn
3008 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3009 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3010 && call_expr_nargs (call) == 2)
3012 tree arg = CALL_EXPR_ARG (call, 0);
3013 if (arg)
/* Look through the FE-inserted widening cast to the call's type.  */
3015 if (TREE_CODE (arg) == NOP_EXPR
3016 && TREE_TYPE (arg) == TREE_TYPE (call))
3017 arg = TREE_OPERAND (arg, 0);
3018 if (truth_value_p (TREE_CODE (arg)))
3020 arg = gimple_boolify (arg);
3021 CALL_EXPR_ARG (call, 0)
3022 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3028 switch (TREE_CODE (expr))
3030 case TRUTH_AND_EXPR:
3031 case TRUTH_OR_EXPR:
3032 case TRUTH_XOR_EXPR:
3033 case TRUTH_ANDIF_EXPR:
3034 case TRUTH_ORIF_EXPR:
3035 /* Also boolify the arguments of truth exprs. */
3036 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3037 /* FALLTHRU */
3039 case TRUTH_NOT_EXPR:
3040 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3042 /* These expressions always produce boolean results. */
3043 if (TREE_CODE (type) != BOOLEAN_TYPE)
3044 TREE_TYPE (expr) = boolean_type_node;
3045 return expr;
3047 case ANNOTATE_EXPR:
3048 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3050 case annot_expr_ivdep_kind:
3051 case annot_expr_no_vector_kind:
3052 case annot_expr_vector_kind:
/* Loop annotations wrap a condition: boolify the wrapped operand.  */
3053 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3054 if (TREE_CODE (type) != BOOLEAN_TYPE)
3055 TREE_TYPE (expr) = boolean_type_node;
3056 return expr;
3057 default:
3058 gcc_unreachable ();
3061 default:
3062 if (COMPARISON_CLASS_P (expr))
3064 /* These expressions always produce boolean results. */
3065 if (TREE_CODE (type) != BOOLEAN_TYPE)
3066 TREE_TYPE (expr) = boolean_type_node;
3067 return expr;
3069 /* Other expressions that get here must have boolean values, but
3070 might need to be converted to the appropriate mode. */
3071 if (TREE_CODE (type) == BOOLEAN_TYPE)
3072 return expr;
3073 return fold_convert_loc (loc, boolean_type_node, expr);
3077 /* Given a conditional expression *EXPR_P without side effects, gimplify
3078 its operands. New statements are inserted to PRE_P. */
3080 static enum gimplify_status
3081 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3083 tree expr = *expr_p, cond;
3084 enum gimplify_status ret, tret;
3085 enum tree_code code;
3087 cond = gimple_boolify (COND_EXPR_COND (expr));
3089 /* We need to handle && and || specially, as their gimplification
3090 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3091 code = TREE_CODE (cond);
3092 if (code == TRUTH_ANDIF_EXPR)
3093 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3094 else if (code == TRUTH_ORIF_EXPR)
3095 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3096 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3097 COND_EXPR_COND (*expr_p) = cond;
3099 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3100 is_gimple_val, fb_rvalue);
3101 ret = MIN (ret, tret);
3102 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3103 is_gimple_val, fb_rvalue);
3105 return MIN (ret, tret);
3108 /* Return true if evaluating EXPR could trap.
3109 EXPR is GENERIC, while tree_could_trap_p can be called
3110 only on GIMPLE. */
3112 static bool
3113 generic_expr_could_trap_p (tree expr)
3115 unsigned i, n;
3117 if (!expr || is_gimple_val (expr))
3118 return false;
3120 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3121 return true;
3123 n = TREE_OPERAND_LENGTH (expr);
3124 for (i = 0; i < n; i++)
3125 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3126 return true;
3128 return false;
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)			if (p)
     t1 = a;			  a;
   else		or		else
     t1 = b;			  b;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  /* If both arms are simple enough (no side effects, no traps),
	     the whole COND_EXPR can be kept as an rvalue expression.  */
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* If an arm is already a GOTO to a local label, reuse its label as the
     branch target instead of creating an artificial one.  */
  have_then_clause_p = have_else_clause_p = false;
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
				 label_false);
  gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple *g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
3374 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3375 to be marked addressable.
3377 We cannot rely on such an expression being directly markable if a temporary
3378 has been created by the gimplification. In this case, we create another
3379 temporary and initialize it with a copy, which will become a store after we
3380 mark it addressable. This can happen if the front-end passed us something
3381 that it could not mark addressable yet, like a Fortran pass-by-reference
3382 parameter (int) floatvar. */
3384 static void
3385 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3387 while (handled_component_p (*expr_p))
3388 expr_p = &TREE_OPERAND (*expr_p, 0);
3389 if (is_gimple_reg (*expr_p))
3391 /* Do not allow an SSA name as the temporary. */
3392 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
3393 DECL_GIMPLE_REG_P (var) = 0;
3394 *expr_p = var;
3398 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3399 a call to __builtin_memcpy. */
3401 static enum gimplify_status
3402 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3403 gimple_seq *seq_p)
3405 tree t, to, to_ptr, from, from_ptr;
3406 gcall *gs;
3407 location_t loc = EXPR_LOCATION (*expr_p);
3409 to = TREE_OPERAND (*expr_p, 0);
3410 from = TREE_OPERAND (*expr_p, 1);
3412 /* Mark the RHS addressable. Beware that it may not be possible to do so
3413 directly if a temporary has been created by the gimplification. */
3414 prepare_gimple_addressable (&from, seq_p);
3416 mark_addressable (from);
3417 from_ptr = build_fold_addr_expr_loc (loc, from);
3418 gimplify_arg (&from_ptr, seq_p, loc);
3420 mark_addressable (to);
3421 to_ptr = build_fold_addr_expr_loc (loc, to);
3422 gimplify_arg (&to_ptr, seq_p, loc);
3424 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3426 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3428 if (want_value)
3430 /* tmp = memcpy() */
3431 t = create_tmp_var (TREE_TYPE (to_ptr));
3432 gimple_call_set_lhs (gs, t);
3433 gimplify_seq_add_stmt (seq_p, gs);
3435 *expr_p = build_simple_mem_ref (t);
3436 return GS_ALL_DONE;
3439 gimplify_seq_add_stmt (seq_p, gs);
3440 *expr_p = NULL;
3441 return GS_ALL_DONE;
3444 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3445 a call to __builtin_memset. In this case we know that the RHS is
3446 a CONSTRUCTOR with an empty element list. */
3448 static enum gimplify_status
3449 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3450 gimple_seq *seq_p)
3452 tree t, from, to, to_ptr;
3453 gcall *gs;
3454 location_t loc = EXPR_LOCATION (*expr_p);
3456 /* Assert our assumptions, to abort instead of producing wrong code
3457 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3458 not be immediately exposed. */
3459 from = TREE_OPERAND (*expr_p, 1);
3460 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3461 from = TREE_OPERAND (from, 0);
3463 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3464 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3466 /* Now proceed. */
3467 to = TREE_OPERAND (*expr_p, 0);
3469 to_ptr = build_fold_addr_expr_loc (loc, to);
3470 gimplify_arg (&to_ptr, seq_p, loc);
3471 t = builtin_decl_implicit (BUILT_IN_MEMSET);
3473 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3475 if (want_value)
3477 /* tmp = memset() */
3478 t = create_tmp_var (TREE_TYPE (to_ptr));
3479 gimple_call_set_lhs (gs, t);
3480 gimplify_seq_add_stmt (seq_p, gs);
3482 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3483 return GS_ALL_DONE;
3486 gimplify_seq_add_stmt (seq_p, gs);
3487 *expr_p = NULL;
3488 return GS_ALL_DONE;
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

/* Context passed (via the void* walk_tree data pointer) to
   gimplify_init_ctor_preeval_1, describing the lhs being initialized.  */
struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};
3505 static tree
3506 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3508 struct gimplify_init_ctor_preeval_data *data
3509 = (struct gimplify_init_ctor_preeval_data *) xdata;
3510 tree t = *tp;
3512 /* If we find the base object, obviously we have overlap. */
3513 if (data->lhs_base_decl == t)
3514 return t;
3516 /* If the constructor component is indirect, determine if we have a
3517 potential overlap with the lhs. The only bits of information we
3518 have to go on at this point are addressability and alias sets. */
3519 if ((INDIRECT_REF_P (t)
3520 || TREE_CODE (t) == MEM_REF)
3521 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3522 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3523 return t;
3525 /* If the constructor component is a call, determine if it can hide a
3526 potential overlap with the lhs through an INDIRECT_REF like above.
3527 ??? Ugh - this is completely broken. In fact this whole analysis
3528 doesn't look conservative. */
3529 if (TREE_CODE (t) == CALL_EXPR)
3531 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3533 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3534 if (POINTER_TYPE_P (TREE_VALUE (type))
3535 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3536 && alias_sets_conflict_p (data->lhs_alias_set,
3537 get_alias_set
3538 (TREE_TYPE (TREE_VALUE (type)))))
3539 return t;
3542 if (IS_TYPE_OR_DECL_P (t))
3543 *walk_subtrees = 0;
3544 return NULL;
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      /* Pre-evaluate each element value in place.  */
      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      /* NULL values are skipped later by gimplify_init_ctor_eval.  */
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

     var = lower;
   loop_entry:
     object[var] = value;
     if (var == upper)
       goto loop_exit;
     var = var + 1;
     goto loop_entry;
   loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
3694 /* Return true if FDECL is accessing a field that is zero sized. */
3696 static bool
3697 zero_sized_field_decl (const_tree fdecl)
3699 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3700 && integer_zerop (DECL_SIZE (fdecl)))
3701 return true;
3702 return false;
3705 /* Return true if TYPE is zero sized. */
3707 static bool
3708 zero_sized_type (const_tree type)
3710 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3711 && integer_zerop (TYPE_SIZE (type)))
3712 return true;
3713 return false;
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* For arrays, remember the element type so ARRAY_REFs can be built.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* If the object was already zeroed, zero initializers are no-ops.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Nested aggregate constructors recurse; everything else becomes an
	 INIT_EXPR that is gimplified and then freed (it is no longer
	 referenced once gimplify_and_add has consumed it).  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
3805 /* Return the appropriate RHS predicate for this LHS. */
3807 gimple_predicate
3808 rhs_predicate_for (tree lhs)
3810 if (is_gimple_reg (lhs))
3811 return is_gimple_reg_rhs_or_call;
3812 else
3813 return is_gimple_mem_rhs_or_call;
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  /* Emit the DECL_EXPR (and thus the initialization) before the current
     statement, and let the literal's decl stand for the expression.  */
  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  Otherwise the
   original ORIG_CTOR is returned unmodified (copy-on-write).  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      /* Nested constructors are processed recursively.  */
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* A non-addressable compound literal whose initializer is itself
	     a CONSTRUCTOR can be replaced by that (optimized) initializer,
	     eliding the literal's decl entirely.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* Copy-on-write: clone ORIG_CTOR and its element vector the first
	 time a replacement is actually made, so callers keep an unshared
	 tree.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      (*elts)[idx].value = newval;
    }
  return ctor;
}
3914 /* A subroutine of gimplify_modify_expr. Break out elements of a
3915 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3917 Note that we still need to clear any elements that don't have explicit
3918 initializers, so if not all elements are initialized we keep the
3919 original MODIFY_EXPR, we just remove all of the constructor elements.
3921 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3922 GS_ERROR if we would have to create a temporary when gimplifying
3923 this constructor. Otherwise, return GS_OK.
3925 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3927 static enum gimplify_status
3928 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3929 bool want_value, bool notify_temp_creation)
3931 tree object, ctor, type;
3932 enum gimplify_status ret;
3933 vec<constructor_elt, va_gc> *elts;
3935 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3937 if (!notify_temp_creation)
3939 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3940 is_gimple_lvalue, fb_lvalue);
3941 if (ret == GS_ERROR)
3942 return ret;
3945 object = TREE_OPERAND (*expr_p, 0);
3946 ctor = TREE_OPERAND (*expr_p, 1) =
3947 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3948 type = TREE_TYPE (ctor);
3949 elts = CONSTRUCTOR_ELTS (ctor);
3950 ret = GS_ALL_DONE;
3952 switch (TREE_CODE (type))
3954 case RECORD_TYPE:
3955 case UNION_TYPE:
3956 case QUAL_UNION_TYPE:
3957 case ARRAY_TYPE:
3959 struct gimplify_init_ctor_preeval_data preeval_data;
3960 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3961 bool cleared, complete_p, valid_const_initializer;
3963 /* Aggregate types must lower constructors to initialization of
3964 individual elements. The exception is that a CONSTRUCTOR node
3965 with no elements indicates zero-initialization of the whole. */
3966 if (vec_safe_is_empty (elts))
3968 if (notify_temp_creation)
3969 return GS_OK;
3970 break;
3973 /* Fetch information about the constructor to direct later processing.
3974 We might want to make static versions of it in various cases, and
3975 can only do so if it known to be a valid constant initializer. */
3976 valid_const_initializer
3977 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3978 &num_ctor_elements, &complete_p);
3980 /* If a const aggregate variable is being initialized, then it
3981 should never be a lose to promote the variable to be static. */
3982 if (valid_const_initializer
3983 && num_nonzero_elements > 1
3984 && TREE_READONLY (object)
3985 && TREE_CODE (object) == VAR_DECL
3986 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3988 if (notify_temp_creation)
3989 return GS_ERROR;
3990 DECL_INITIAL (object) = ctor;
3991 TREE_STATIC (object) = 1;
3992 if (!DECL_NAME (object))
3993 DECL_NAME (object) = create_tmp_var_name ("C");
3994 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3996 /* ??? C++ doesn't automatically append a .<number> to the
3997 assembler name, and even when it does, it looks at FE private
3998 data structures to figure out what that number should be,
3999 which are not set for this variable. I suppose this is
4000 important for local statics for inline functions, which aren't
4001 "local" in the object file sense. So in order to get a unique
4002 TU-local symbol, we must invoke the lhd version now. */
4003 lhd_set_decl_assembler_name (object);
4005 *expr_p = NULL_TREE;
4006 break;
4009 /* If there are "lots" of initialized elements, even discounting
4010 those that are not address constants (and thus *must* be
4011 computed at runtime), then partition the constructor into
4012 constant and non-constant parts. Block copy the constant
4013 parts in, then generate code for the non-constant parts. */
4014 /* TODO. There's code in cp/typeck.c to do this. */
4016 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4017 /* store_constructor will ignore the clearing of variable-sized
4018 objects. Initializers for such objects must explicitly set
4019 every field that needs to be set. */
4020 cleared = false;
4021 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
4022 /* If the constructor isn't complete, clear the whole object
4023 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4025 ??? This ought not to be needed. For any element not present
4026 in the initializer, we should simply set them to zero. Except
4027 we'd need to *find* the elements that are not present, and that
4028 requires trickery to avoid quadratic compile-time behavior in
4029 large cases or excessive memory use in small cases. */
4030 cleared = true;
4031 else if (num_ctor_elements - num_nonzero_elements
4032 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4033 && num_nonzero_elements < num_ctor_elements / 4)
4034 /* If there are "lots" of zeros, it's more efficient to clear
4035 the memory and then set the nonzero elements. */
4036 cleared = true;
4037 else
4038 cleared = false;
4040 /* If there are "lots" of initialized elements, and all of them
4041 are valid address constants, then the entire initializer can
4042 be dropped to memory, and then memcpy'd out. Don't do this
4043 for sparse arrays, though, as it's more efficient to follow
4044 the standard CONSTRUCTOR behavior of memset followed by
4045 individual element initialization. Also don't do this for small
4046 all-zero initializers (which aren't big enough to merit
4047 clearing), and don't try to make bitwise copies of
4048 TREE_ADDRESSABLE types.
4050 We cannot apply such transformation when compiling chkp static
4051 initializer because creation of initializer image in the memory
4052 will require static initialization of bounds for it. It should
4053 result in another gimplification of similar initializer and we
4054 may fall into infinite loop. */
4055 if (valid_const_initializer
4056 && !(cleared || num_nonzero_elements == 0)
4057 && !TREE_ADDRESSABLE (type)
4058 && (!current_function_decl
4059 || !lookup_attribute ("chkp ctor",
4060 DECL_ATTRIBUTES (current_function_decl))))
4062 HOST_WIDE_INT size = int_size_in_bytes (type);
4063 unsigned int align;
4065 /* ??? We can still get unbounded array types, at least
4066 from the C++ front end. This seems wrong, but attempt
4067 to work around it for now. */
4068 if (size < 0)
4070 size = int_size_in_bytes (TREE_TYPE (object));
4071 if (size >= 0)
4072 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4075 /* Find the maximum alignment we can assume for the object. */
4076 /* ??? Make use of DECL_OFFSET_ALIGN. */
4077 if (DECL_P (object))
4078 align = DECL_ALIGN (object);
4079 else
4080 align = TYPE_ALIGN (type);
4082 /* Do a block move either if the size is so small as to make
4083 each individual move a sub-unit move on average, or if it
4084 is so large as to make individual moves inefficient. */
4085 if (size > 0
4086 && num_nonzero_elements > 1
4087 && (size < num_nonzero_elements
4088 || !can_move_by_pieces (size, align)))
4090 if (notify_temp_creation)
4091 return GS_ERROR;
4093 walk_tree (&ctor, force_labels_r, NULL, NULL);
4094 ctor = tree_output_constant_def (ctor);
4095 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4096 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4097 TREE_OPERAND (*expr_p, 1) = ctor;
4099 /* This is no longer an assignment of a CONSTRUCTOR, but
4100 we still may have processing to do on the LHS. So
4101 pretend we didn't do anything here to let that happen. */
4102 return GS_UNHANDLED;
4106 /* If the target is volatile, we have non-zero elements and more than
4107 one field to assign, initialize the target from a temporary. */
4108 if (TREE_THIS_VOLATILE (object)
4109 && !TREE_ADDRESSABLE (type)
4110 && num_nonzero_elements > 0
4111 && vec_safe_length (elts) > 1)
4113 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4114 TREE_OPERAND (*expr_p, 0) = temp;
4115 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4116 *expr_p,
4117 build2 (MODIFY_EXPR, void_type_node,
4118 object, temp));
4119 return GS_OK;
4122 if (notify_temp_creation)
4123 return GS_OK;
4125 /* If there are nonzero elements and if needed, pre-evaluate to capture
4126 elements overlapping with the lhs into temporaries. We must do this
4127 before clearing to fetch the values before they are zeroed-out. */
4128 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4130 preeval_data.lhs_base_decl = get_base_address (object);
4131 if (!DECL_P (preeval_data.lhs_base_decl))
4132 preeval_data.lhs_base_decl = NULL;
4133 preeval_data.lhs_alias_set = get_alias_set (object);
4135 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4136 pre_p, post_p, &preeval_data);
4139 bool ctor_has_side_effects_p
4140 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4142 if (cleared)
4144 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4145 Note that we still have to gimplify, in order to handle the
4146 case of variable sized types. Avoid shared tree structures. */
4147 CONSTRUCTOR_ELTS (ctor) = NULL;
4148 TREE_SIDE_EFFECTS (ctor) = 0;
4149 object = unshare_expr (object);
4150 gimplify_stmt (expr_p, pre_p);
4153 /* If we have not block cleared the object, or if there are nonzero
4154 elements in the constructor, or if the constructor has side effects,
4155 add assignments to the individual scalar fields of the object. */
4156 if (!cleared
4157 || num_nonzero_elements > 0
4158 || ctor_has_side_effects_p)
4159 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4161 *expr_p = NULL_TREE;
4163 break;
4165 case COMPLEX_TYPE:
4167 tree r, i;
4169 if (notify_temp_creation)
4170 return GS_OK;
4172 /* Extract the real and imaginary parts out of the ctor. */
4173 gcc_assert (elts->length () == 2);
4174 r = (*elts)[0].value;
4175 i = (*elts)[1].value;
4176 if (r == NULL || i == NULL)
4178 tree zero = build_zero_cst (TREE_TYPE (type));
4179 if (r == NULL)
4180 r = zero;
4181 if (i == NULL)
4182 i = zero;
4185 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4186 represent creation of a complex value. */
4187 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4189 ctor = build_complex (type, r, i);
4190 TREE_OPERAND (*expr_p, 1) = ctor;
4192 else
4194 ctor = build2 (COMPLEX_EXPR, type, r, i);
4195 TREE_OPERAND (*expr_p, 1) = ctor;
4196 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4197 pre_p,
4198 post_p,
4199 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4200 fb_rvalue);
4203 break;
4205 case VECTOR_TYPE:
4207 unsigned HOST_WIDE_INT ix;
4208 constructor_elt *ce;
4210 if (notify_temp_creation)
4211 return GS_OK;
4213 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4214 if (TREE_CONSTANT (ctor))
4216 bool constant_p = true;
4217 tree value;
4219 /* Even when ctor is constant, it might contain non-*_CST
4220 elements, such as addresses or trapping values like
4221 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4222 in VECTOR_CST nodes. */
4223 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4224 if (!CONSTANT_CLASS_P (value))
4226 constant_p = false;
4227 break;
4230 if (constant_p)
4232 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4233 break;
4236 TREE_CONSTANT (ctor) = 0;
4239 /* Vector types use CONSTRUCTOR all the way through gimple
4240 compilation as a general initializer. */
4241 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4243 enum gimplify_status tret;
4244 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4245 fb_rvalue);
4246 if (tret == GS_ERROR)
4247 ret = GS_ERROR;
4248 else if (TREE_STATIC (ctor)
4249 && !initializer_constant_valid_p (ce->value,
4250 TREE_TYPE (ce->value)))
4251 TREE_STATIC (ctor) = 0;
4253 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4254 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4256 break;
4258 default:
4259 /* So how did we get a CONSTRUCTOR for a scalar type? */
4260 gcc_unreachable ();
4263 if (ret == GS_ERROR)
4264 return GS_ERROR;
4265 else if (want_value)
4267 *expr_p = object;
4268 return GS_OK;
4270 else
4272 /* If we have gimplified both sides of the initializer but have
4273 not emitted an assignment, do so now. */
4274 if (*expr_p)
4276 tree lhs = TREE_OPERAND (*expr_p, 0);
4277 tree rhs = TREE_OPERAND (*expr_p, 1);
4278 gassign *init = gimple_build_assign (lhs, rhs);
4279 gimplify_seq_add_stmt (pre_p, init);
4280 *expr_p = NULL;
4283 return GS_ALL_DONE;
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  /* Thin wrapper: all of the folding logic lives in
     gimple_fold_indirect_ref; this name documents that the result is
     only valid on the right-hand side of an assignment.  */
  return gimple_fold_indirect_ref (t);
}
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   EXPR_P points to the whole MODIFY_EXPR/INIT_EXPR; FROM_P and TO_P
   point to its RHS and LHS operands respectively.  PRE_P and POST_P
   collect side-effect statements.  WANT_VALUE is true if the value of
   the assignment is itself used in an enclosing expression.

   Returns GS_UNHANDLED if no simplification was applicable; otherwise
   the status of the transformation performed.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    /* Preserve volatility of the original indirection:
		       re-wrap a plain DECL in a MEM_REF so the flag has
		       somewhere to live.  */
		    if (DECL_P (t))
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere?  I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr.  */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* Rewrite each non-void arm into an assignment of that arm
	         to RESULT.  */
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;

	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
		       != INTEGER_CST)
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* fall through to default.  */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
4584 /* Return true if T looks like a valid GIMPLE statement. */
4586 static bool
4587 is_gimple_stmt (tree t)
4589 const enum tree_code code = TREE_CODE (t);
4591 switch (code)
4593 case NOP_EXPR:
4594 /* The only valid NOP_EXPR is the empty statement. */
4595 return IS_EMPTY_STMT (t);
4597 case BIND_EXPR:
4598 case COND_EXPR:
4599 /* These are only valid if they're void. */
4600 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4602 case SWITCH_EXPR:
4603 case GOTO_EXPR:
4604 case RETURN_EXPR:
4605 case LABEL_EXPR:
4606 case CASE_LABEL_EXPR:
4607 case TRY_CATCH_EXPR:
4608 case TRY_FINALLY_EXPR:
4609 case EH_FILTER_EXPR:
4610 case CATCH_EXPR:
4611 case ASM_EXPR:
4612 case STATEMENT_LIST:
4613 case OACC_PARALLEL:
4614 case OACC_KERNELS:
4615 case OACC_DATA:
4616 case OACC_HOST_DATA:
4617 case OACC_DECLARE:
4618 case OACC_UPDATE:
4619 case OACC_ENTER_DATA:
4620 case OACC_EXIT_DATA:
4621 case OACC_CACHE:
4622 case OMP_PARALLEL:
4623 case OMP_FOR:
4624 case OMP_SIMD:
4625 case CILK_SIMD:
4626 case OMP_DISTRIBUTE:
4627 case OACC_LOOP:
4628 case OMP_SECTIONS:
4629 case OMP_SECTION:
4630 case OMP_SINGLE:
4631 case OMP_MASTER:
4632 case OMP_TASKGROUP:
4633 case OMP_ORDERED:
4634 case OMP_CRITICAL:
4635 case OMP_TASK:
4636 case OMP_TARGET:
4637 case OMP_TARGET_DATA:
4638 case OMP_TARGET_UPDATE:
4639 case OMP_TARGET_ENTER_DATA:
4640 case OMP_TARGET_EXIT_DATA:
4641 case OMP_TASKLOOP:
4642 case OMP_TEAMS:
4643 /* These are always void. */
4644 return true;
4646 case CALL_EXPR:
4647 case MODIFY_EXPR:
4648 case PREDICT_EXPR:
4649 /* These are valid regardless of their type. */
4650 return true;
4652 default:
4653 return false;
4658 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4659 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4660 DECL_GIMPLE_REG_P set.
4662 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4663 other, unmodified part of the complex object just before the total store.
4664 As a consequence, if the object is still uninitialized, an undefined value
4665 will be loaded into a register, which may result in a spurious exception
4666 if the register is floating-point and the value happens to be a signaling
4667 NaN for example. Then the fully-fledged complex operations lowering pass
4668 followed by a DCE pass are necessary in order to fix things up. */
4670 static enum gimplify_status
4671 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4672 bool want_value)
4674 enum tree_code code, ocode;
4675 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4677 lhs = TREE_OPERAND (*expr_p, 0);
4678 rhs = TREE_OPERAND (*expr_p, 1);
4679 code = TREE_CODE (lhs);
4680 lhs = TREE_OPERAND (lhs, 0);
4682 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4683 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4684 TREE_NO_WARNING (other) = 1;
4685 other = get_formal_tmp_var (other, pre_p);
4687 realpart = code == REALPART_EXPR ? rhs : other;
4688 imagpart = code == REALPART_EXPR ? other : rhs;
4690 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4691 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4692 else
4693 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4695 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4696 *expr_p = (want_value) ? rhs : NULL_TREE;
4698 return GS_ALL_DONE;
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

    WANT_VALUE is nonzero iff we want to use the value of this expression
	in another expression.

    Returns GS_ALL_DONE (or GS_OK when WANT_VALUE and the value was
    produced), or GS_ERROR if gimplifying either operand failed.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value
		  && (TREE_CODE (*to_p) == VAR_DECL
		      || TREE_CODE (*to_p) == MEM_REF));
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */
  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
		       fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  /* Rebuild the IFN_VA_ARG call with VLASIZE appended.  */
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && TREE_CODE (*from_p) == VAR_DECL
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl)
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  /* A volatile LHS cannot be re-read as the value of the expression;
     capture the RHS in a temporary first.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  /* __builtin_expect with 3 args is lowered to the internal
	     function so later passes can treat it specially.  */
	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    {
	      call_stmt = gimple_build_call_from_tree (*from_p);
	      gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
	    }
	}
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for a
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ??? This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
	gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
4959 /* Gimplify a comparison between two variable-sized objects. Do this
4960 with a call to BUILT_IN_MEMCMP. */
4962 static enum gimplify_status
4963 gimplify_variable_sized_compare (tree *expr_p)
4965 location_t loc = EXPR_LOCATION (*expr_p);
4966 tree op0 = TREE_OPERAND (*expr_p, 0);
4967 tree op1 = TREE_OPERAND (*expr_p, 1);
4968 tree t, arg, dest, src, expr;
4970 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4971 arg = unshare_expr (arg);
4972 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4973 src = build_fold_addr_expr_loc (loc, op1);
4974 dest = build_fold_addr_expr_loc (loc, op0);
4975 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4976 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4978 expr
4979 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4980 SET_EXPR_LOCATION (expr, loc);
4981 *expr_p = expr;
4983 return GS_OK;
4986 /* Gimplify a comparison between two aggregate objects of integral scalar
4987 mode as a comparison between the bitwise equivalent scalar values. */
4989 static enum gimplify_status
4990 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4992 location_t loc = EXPR_LOCATION (*expr_p);
4993 tree op0 = TREE_OPERAND (*expr_p, 0);
4994 tree op1 = TREE_OPERAND (*expr_p, 1);
4996 tree type = TREE_TYPE (op0);
4997 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4999 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5000 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5002 *expr_p
5003 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5005 return GS_OK;
5008 /* Gimplify an expression sequence. This function gimplifies each
5009 expression and rewrites the original expression with the last
5010 expression of the sequence in GIMPLE form.
5012 PRE_P points to the list where the side effects for all the
5013 expressions in the sequence will be emitted.
5015 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5017 static enum gimplify_status
5018 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5020 tree t = *expr_p;
5024 tree *sub_p = &TREE_OPERAND (t, 0);
5026 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5027 gimplify_compound_expr (sub_p, pre_p, false);
5028 else
5029 gimplify_stmt (sub_p, pre_p);
5031 t = TREE_OPERAND (t, 1);
5033 while (TREE_CODE (t) == COMPOUND_EXPR);
5035 *expr_p = t;
5036 if (want_value)
5037 return GS_OK;
5038 else
5040 gimplify_stmt (expr_p, pre_p);
5041 return GS_ALL_DONE;
5045 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5046 gimplify. After gimplification, EXPR_P will point to a new temporary
5047 that holds the original value of the SAVE_EXPR node.
5049 PRE_P points to the list where side effects that must happen before
5050 *EXPR_P should be stored. */
5052 static enum gimplify_status
5053 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5055 enum gimplify_status ret = GS_ALL_DONE;
5056 tree val;
5058 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5059 val = TREE_OPERAND (*expr_p, 0);
5061 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5062 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5064 /* The operand may be a void-valued expression such as SAVE_EXPRs
5065 generated by the Java frontend for class initialization. It is
5066 being executed only for its side-effects. */
5067 if (TREE_TYPE (val) == void_type_node)
5069 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5070 is_gimple_stmt, fb_none);
5071 val = NULL;
5073 else
5074 /* The temporary may not be an SSA name as later abnormal and EH
5075 control flow may invalidate use/def domination. */
5076 val = get_initialized_tmp_var (val, pre_p, post_p, false);
5078 TREE_OPERAND (*expr_p, 0) = val;
5079 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5082 *expr_p = val;
5084 return ret;
5087 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5089 unary_expr
5090 : ...
5091 | '&' varname
5094 PRE_P points to the list where side effects that must happen before
5095 *EXPR_P should be stored.
5097 POST_P points to the list where side effects that must happen after
5098 *EXPR_P should be stored. */
5100 static enum gimplify_status
5101 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5103 tree expr = *expr_p;
5104 tree op0 = TREE_OPERAND (expr, 0);
5105 enum gimplify_status ret;
5106 location_t loc = EXPR_LOCATION (*expr_p);
/* Dispatch on the shape of the ADDR_EXPR operand; each case rewrites
   *EXPR_P in place and reports how much work remains via RET.  */
5108 switch (TREE_CODE (op0))
5110 case INDIRECT_REF:
5111 do_indirect_ref:
5112 /* Check if we are dealing with an expression of the form '&*ptr'.
5113 While the front end folds away '&*ptr' into 'ptr', these
5114 expressions may be generated internally by the compiler (e.g.,
5115 builtins like __builtin_va_end). */
5116 /* Caution: the silent array decomposition semantics we allow for
5117 ADDR_EXPR means we can't always discard the pair. */
5118 /* Gimplification of the ADDR_EXPR operand may drop
5119 cv-qualification conversions, so make sure we add them if
5120 needed. */
5122 tree op00 = TREE_OPERAND (op0, 0);
5123 tree t_expr = TREE_TYPE (expr);
5124 tree t_op00 = TREE_TYPE (op00);
5126 if (!useless_type_conversion_p (t_expr, t_op00))
5127 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5128 *expr_p = op00;
5129 ret = GS_OK;
5131 break;
5133 case VIEW_CONVERT_EXPR:
5134 /* Take the address of our operand and then convert it to the type of
5135 this ADDR_EXPR.
5137 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5138 all clear. The impact of this transformation is even less clear. */
5140 /* If the operand is a useless conversion, look through it. Doing so
5141 guarantees that the ADDR_EXPR and its operand will remain of the
5142 same type. */
5143 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5144 op0 = TREE_OPERAND (op0, 0);
5146 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5147 build_fold_addr_expr_loc (loc,
5148 TREE_OPERAND (op0, 0)));
5149 ret = GS_OK;
5150 break;
5152 case MEM_REF:
/* &MEM[ptr, 0] is equivalent to &*ptr, so share the INDIRECT_REF
   handling.  A nonzero offset falls through to the generic code.  */
5153 if (integer_zerop (TREE_OPERAND (op0, 1)))
5154 goto do_indirect_ref;
5156 /* ... fall through ... */
5158 default:
5159 /* If we see a call to a declared builtin or see its address
5160 being taken (we can unify those cases here) then we can mark
5161 the builtin for implicit generation by GCC. */
5162 if (TREE_CODE (op0) == FUNCTION_DECL
5163 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5164 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5165 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5167 /* We use fb_either here because the C frontend sometimes takes
5168 the address of a call that returns a struct; see
5169 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5170 the implied temporary explicit. */
5172 /* Make the operand addressable. */
5173 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5174 is_gimple_addressable, fb_either);
5175 if (ret == GS_ERROR)
5176 break;
5178 /* Then mark it. Beware that it may not be possible to do so directly
5179 if a temporary has been created by the gimplification. */
5180 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
/* Re-read the operand: the gimplification above may have replaced
   it (e.g. with a temporary or an INDIRECT_REF).  */
5182 op0 = TREE_OPERAND (expr, 0);
5184 /* For various reasons, the gimplification of the expression
5185 may have made a new INDIRECT_REF. */
5186 if (TREE_CODE (op0) == INDIRECT_REF)
5187 goto do_indirect_ref;
5189 mark_addressable (TREE_OPERAND (expr, 0));
5191 /* The FEs may end up building ADDR_EXPRs early on a decl with
5192 an incomplete type. Re-build ADDR_EXPRs in canonical form
5193 here. */
5194 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5195 *expr_p = build_fold_addr_expr (op0);
5197 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5198 recompute_tree_invariant_for_addr_expr (*expr_p);
5200 /* If we re-built the ADDR_EXPR add a conversion to the original type
5201 if required. */
5202 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5203 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5205 break;
5208 return ret;
5211 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5212 value; output operands should be a gimple lvalue. */
5214 static enum gimplify_status
5215 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5217 tree expr;
5218 int noutputs;
5219 const char **oconstraints;
5220 int i;
5221 tree link;
5222 const char *constraint;
5223 bool allows_mem, allows_reg, is_inout;
5224 enum gimplify_status ret, tret;
5225 gasm *stmt;
5226 vec<tree, va_gc> *inputs;
5227 vec<tree, va_gc> *outputs;
5228 vec<tree, va_gc> *clobbers;
5229 vec<tree, va_gc> *labels;
5230 tree link_next;
5232 expr = *expr_p;
5233 noutputs = list_length (ASM_OUTPUTS (expr));
/* Remember each output constraint string so that matching input
   constraints ("0", "1", ...) can be validated against them below.  */
5234 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5236 inputs = NULL;
5237 outputs = NULL;
5238 clobbers = NULL;
5239 labels = NULL;
5241 ret = GS_ALL_DONE;
5242 link_next = NULL_TREE;
5243 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5245 bool ok;
5246 size_t constraint_len;
5248 link_next = TREE_CHAIN (link);
5250 oconstraints[i]
5251 = constraint
5252 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5253 constraint_len = strlen (constraint);
5254 if (constraint_len == 0)
5255 continue;
5257 ok = parse_output_constraint (&constraint, i, 0, 0,
5258 &allows_mem, &allows_reg, &is_inout);
5259 if (!ok)
5261 ret = GS_ERROR;
5262 is_inout = false;
5265 if (!allows_reg && allows_mem)
5266 mark_addressable (TREE_VALUE (link));
5268 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5269 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5270 fb_lvalue | fb_mayfail);
5271 if (tret == GS_ERROR)
5273 error ("invalid lvalue in asm output %d", i);
5274 ret = tret;
5277 /* If the constraint does not allow memory make sure we gimplify
5278 it to a register if it is not already but its base is. This
5279 happens for complex and vector components. */
5280 if (!allows_mem)
5282 tree op = TREE_VALUE (link);
5283 if (! is_gimple_val (op)
5284 && is_gimple_reg_type (TREE_TYPE (op))
5285 && is_gimple_reg (get_base_address (op)))
5287 tree tem = create_tmp_reg (TREE_TYPE (op));
5288 tree ass;
5289 if (is_inout)
5291 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
5292 tem, unshare_expr (op));
5293 gimplify_and_add (ass, pre_p);
5295 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
5296 gimplify_and_add (ass, post_p);
5298 TREE_VALUE (link) = tem;
5299 tret = GS_OK;
5303 vec_safe_push (outputs, link);
5304 TREE_CHAIN (link) = NULL_TREE;
5306 if (is_inout)
5308 /* An input/output operand. To give the optimizers more
5309 flexibility, split it into separate input and output
5310 operands. */
5311 tree input;
5312 char buf[10];
5314 /* Turn the in/out constraint into an output constraint. */
5315 char *p = xstrdup (constraint);
5316 p[0] = '=';
5317 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5319 /* And add a matching input constraint. */
5320 if (allows_reg)
5322 sprintf (buf, "%d", i);
5324 /* If there are multiple alternatives in the constraint,
5325 handle each of them individually. Those that allow register
5326 will be replaced with operand number, the others will stay
5327 unchanged. */
5328 if (strchr (p, ',') != NULL)
5330 size_t len = 0, buflen = strlen (buf);
5331 char *beg, *end, *str, *dst;
/* First pass: compute an upper bound on the rewritten
   constraint's length.  */
5333 for (beg = p + 1;;)
5335 end = strchr (beg, ',');
5336 if (end == NULL)
5337 end = strchr (beg, '\0');
5338 if ((size_t) (end - beg) < buflen)
5339 len += buflen + 1;
5340 else
5341 len += end - beg + 1;
5342 if (*end)
5343 beg = end + 1;
5344 else
5345 break;
5348 str = (char *) alloca (len);
/* Second pass: copy each alternative, substituting the operand
   number for alternatives that allow a register.  */
5349 for (beg = p + 1, dst = str;;)
5351 const char *tem;
5352 bool mem_p, reg_p, inout_p;
5354 end = strchr (beg, ',');
5355 if (end)
5356 *end = '\0';
5357 beg[-1] = '=';
5358 tem = beg - 1;
5359 parse_output_constraint (&tem, i, 0, 0,
5360 &mem_p, &reg_p, &inout_p);
5361 if (dst != str)
5362 *dst++ = ',';
5363 if (reg_p)
5365 memcpy (dst, buf, buflen);
5366 dst += buflen;
5368 else
5370 if (end)
5371 len = end - beg;
5372 else
5373 len = strlen (beg);
5374 memcpy (dst, beg, len);
5375 dst += len;
5377 if (end)
5378 beg = end + 1;
5379 else
5380 break;
5382 *dst = '\0';
5383 input = build_string (dst - str, str);
5385 else
5386 input = build_string (strlen (buf), buf);
5388 else
5389 input = build_string (constraint_len - 1, constraint + 1);
5391 free (p);
5393 input = build_tree_list (build_tree_list (NULL_TREE, input),
5394 unshare_expr (TREE_VALUE (link)));
5395 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
/* Note that I keeps counting from the outputs: asm operands are
   numbered across outputs and inputs, and the diagnostics below use
   that overall operand number.  */
5399 link_next = NULL_TREE;
5400 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5402 link_next = TREE_CHAIN (link);
5403 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5404 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5405 oconstraints, &allows_mem, &allows_reg);
5407 /* If we can't make copies, we can only accept memory. */
5408 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5410 if (allows_mem)
5411 allows_reg = 0;
5412 else
5414 error ("impossible constraint in %<asm%>");
5415 error ("non-memory input %d must stay in memory", i);
5416 return GS_ERROR;
5420 /* If the operand is a memory input, it should be an lvalue. */
5421 if (!allows_reg && allows_mem)
5423 tree inputv = TREE_VALUE (link);
5424 STRIP_NOPS (inputv);
5425 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5426 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5427 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5428 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
5429 || TREE_CODE (inputv) == MODIFY_EXPR)
5430 TREE_VALUE (link) = error_mark_node;
5431 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5432 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5433 if (tret != GS_ERROR)
5435 /* Unlike output operands, memory inputs are not guaranteed
5436 to be lvalues by the FE, and while the expressions are
5437 marked addressable there, if it is e.g. a statement
5438 expression, temporaries in it might not end up being
5439 addressable. They might be already used in the IL and thus
5440 it is too late to make them addressable now though. */
5441 tree x = TREE_VALUE (link);
5442 while (handled_component_p (x))
5443 x = TREE_OPERAND (x, 0);
5444 if (TREE_CODE (x) == MEM_REF
5445 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
5446 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
5447 if ((TREE_CODE (x) == VAR_DECL
5448 || TREE_CODE (x) == PARM_DECL
5449 || TREE_CODE (x) == RESULT_DECL)
5450 && !TREE_ADDRESSABLE (x)
5451 && is_gimple_reg (x))
5453 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
5454 input_location), 0,
5455 "memory input %d is not directly addressable",
5457 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
5460 mark_addressable (TREE_VALUE (link));
5461 if (tret == GS_ERROR)
5463 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
5464 "memory input %d is not directly addressable", i);
5465 ret = tret;
5468 else
5470 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5471 is_gimple_asm_val, fb_rvalue);
5472 if (tret == GS_ERROR)
5473 ret = tret;
5476 TREE_CHAIN (link) = NULL_TREE;
5477 vec_safe_push (inputs, link);
5480 link_next = NULL_TREE;
5481 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5483 link_next = TREE_CHAIN (link);
5484 TREE_CHAIN (link) = NULL_TREE;
5485 vec_safe_push (clobbers, link);
5488 link_next = NULL_TREE;
5489 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5491 link_next = TREE_CHAIN (link);
5492 TREE_CHAIN (link) = NULL_TREE;
5493 vec_safe_push (labels, link);
5496 /* Do not add ASMs with errors to the gimple IL stream. */
5497 if (ret != GS_ERROR)
5499 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5500 inputs, outputs, clobbers, labels)
5502 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
5503 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5505 gimplify_seq_add_stmt (pre_p, stmt);
5508 return ret;
5511 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5512 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5513 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5514 return to this function.
5516 FIXME should we complexify the prequeue handling instead? Or use flags
5517 for all the cleanups and let the optimizer tighten them up? The current
5518 code seems pretty fragile; it will break on a cleanup within any
5519 non-conditional nesting. But any such nesting would be broken, anyway;
5520 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5521 and continues out of it. We can do that at the RTL level, though, so
5522 having an optimizer to tighten up try/finally regions would be a Good
5523 Thing. */
5525 static enum gimplify_status
5526 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5528 gimple_stmt_iterator iter;
5529 gimple_seq body_sequence = NULL;
5531 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5533 /* We only care about the number of conditions between the innermost
5534 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5535 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5536 int old_conds = gimplify_ctxp->conditions;
5537 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5538 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5539 gimplify_ctxp->conditions = 0;
5540 gimplify_ctxp->conditional_cleanups = NULL;
5541 gimplify_ctxp->in_cleanup_point_expr = true;
5543 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5545 gimplify_ctxp->conditions = old_conds;
5546 gimplify_ctxp->conditional_cleanups = old_cleanups;
5547 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
/* Now convert each GIMPLE_WITH_CLEANUP_EXPR marker in the gimplified
   body into a real try/finally (or try/catch for EH-only cleanups)
   protecting all of the statements that follow the marker.  */
5549 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5551 gimple *wce = gsi_stmt (iter);
5553 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5555 if (gsi_one_before_end_p (iter))
/* The marker is the last statement, so there is nothing to
   protect; just emit the cleanup inline (unless it should run
   only on the EH path) and drop the marker.  */
5557 /* Note that gsi_insert_seq_before and gsi_remove do not
5558 scan operands, unlike some other sequence mutators. */
5559 if (!gimple_wce_cleanup_eh_only (wce))
5560 gsi_insert_seq_before_without_update (&iter,
5561 gimple_wce_cleanup (wce),
5562 GSI_SAME_STMT);
5563 gsi_remove (&iter, true);
5564 break;
5566 else
5568 gtry *gtry;
5569 gimple_seq seq;
5570 enum gimple_try_flags kind;
5572 if (gimple_wce_cleanup_eh_only (wce))
5573 kind = GIMPLE_TRY_CATCH;
5574 else
5575 kind = GIMPLE_TRY_FINALLY;
5576 seq = gsi_split_seq_after (iter);
5578 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5579 /* Do not use gsi_replace here, as it may scan operands.
5580 We want to do a simple structural modification only. */
5581 gsi_set_stmt (&iter, gtry);
5582 iter = gsi_start (gtry->eval);
5585 else
5586 gsi_next (&iter);
5589 gimplify_seq_add_seq (pre_p, body_sequence);
5590 if (temp)
5592 *expr_p = temp;
5593 return GS_OK;
5595 else
5597 *expr_p = NULL;
5598 return GS_ALL_DONE;
5602 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5603 is the cleanup action required. EH_ONLY is true if the cleanup should
5604 only be executed if an exception is thrown, not on normal exit. */
5606 static void
5607 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5609 gimple *wce;
5610 gimple_seq cleanup_stmts = NULL;
5612 /* Errors can result in improperly nested cleanups. Which results in
5613 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5614 if (seen_error ())
5615 return;
5617 if (gimple_conditional_context ())
5619 /* If we're in a conditional context, this is more complex. We only
5620 want to run the cleanup if we actually ran the initialization that
5621 necessitates it, but we want to run it after the end of the
5622 conditional context. So we wrap the try/finally around the
5623 condition and use a flag to determine whether or not to actually
5624 run the destructor. Thus
5626 test ? f(A()) : 0
5628 becomes (approximately)
5630 flag = 0;
5631 try {
5632 if (test) { A::A(temp); flag = 1; val = f(temp); }
5633 else { val = 0; }
5634 } finally {
5635 if (flag) A::~A(temp);
5639 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5640 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
5641 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
5643 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5644 gimplify_stmt (&cleanup, &cleanup_stmts);
5645 wce = gimple_build_wce (cleanup_stmts);
/* FFALSE and the flag-guarded cleanup go to the conditional-cleanups
   queue (presumably emitted ahead of the enclosing conditional by its
   gimplifier -- confirm against gimplify_cond_expr); FTRUE is emitted
   here, so the flag is set only when the initialization executes.  */
5647 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5648 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5649 gimplify_seq_add_stmt (pre_p, ftrue);
5651 /* Because of this manipulation, and the EH edges that jump
5652 threading cannot redirect, the temporary (VAR) will appear
5653 to be used uninitialized. Don't warn. */
5654 TREE_NO_WARNING (var) = 1;
5656 else
5658 gimplify_stmt (&cleanup, &cleanup_stmts);
5659 wce = gimple_build_wce (cleanup_stmts);
5660 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5661 gimplify_seq_add_stmt (pre_p, wce);
5665 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5667 static enum gimplify_status
5668 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5670 tree targ = *expr_p;
5671 tree temp = TARGET_EXPR_SLOT (targ);
5672 tree init = TARGET_EXPR_INITIAL (targ);
5673 enum gimplify_status ret;
/* INIT is non-null only the first time this TARGET_EXPR is seen; it is
   cleared below so that re-gimplifying the node just yields the slot.  */
5675 if (init)
5677 tree cleanup = NULL_TREE;
5679 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5680 to the temps list. Handle also variable length TARGET_EXPRs. */
5681 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5683 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5684 gimplify_type_sizes (TREE_TYPE (temp), pre_p)
5685 gimplify_vla_decl (temp, pre_p);
5687 else
5688 gimple_add_tmp_var (temp);
5690 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5691 expression is supposed to initialize the slot. */
5692 if (VOID_TYPE_P (TREE_TYPE (init)))
5693 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5694 else
5696 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5697 init = init_expr;
5698 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5699 init = NULL;
5700 ggc_free (init_expr);
5702 if (ret == GS_ERROR)
5704 /* PR c++/28266 Make sure this is expanded only once. */
5705 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5706 return GS_ERROR;
5708 if (init)
5709 gimplify_and_add (init, pre_p);
5711 /* If needed, push the cleanup for the temp. */
5712 if (TARGET_EXPR_CLEANUP (targ))
5714 if (CLEANUP_EH_ONLY (targ))
5715 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5716 CLEANUP_EH_ONLY (targ), pre_p);
5717 else
5718 cleanup = TARGET_EXPR_CLEANUP (targ);
5721 /* Add a clobber for the temporary going out of scope, like
5722 gimplify_bind_expr. */
5723 if (gimplify_ctxp->in_cleanup_point_expr
5724 && needs_to_live_in_memory (temp)
5725 && flag_stack_reuse == SR_ALL)
5727 tree clobber = build_constructor (TREE_TYPE (temp),
5728 NULL);
5729 TREE_THIS_VOLATILE (clobber) = true;
5730 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
/* Run the clobber after any user cleanup, so the slot is marked
   dead only after the cleanup has used it.  */
5731 if (cleanup)
5732 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5733 clobber);
5734 else
5735 cleanup = clobber;
5738 if (cleanup)
5739 gimple_push_cleanup (temp, cleanup, false, pre_p);
5741 /* Only expand this once. */
5742 TREE_OPERAND (targ, 3) = init;
5743 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5745 else
5746 /* We should have expanded this before. */
5747 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5749 *expr_p = temp;
5750 return GS_OK;
5753 /* Gimplification of expression trees. */
5755 /* Gimplify an expression which appears at statement context. The
5756 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5757 NULL, a new sequence is allocated.
5759 Return true if we actually added a statement to the queue. */
5761 bool
5762 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5764 gimple_seq_node last;
5766 last = gimple_seq_last (*seq_p);
5767 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5768 return last != gimple_seq_last (*seq_p);
5771 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5772 to CTX. If entries already exist, force them to be some flavor of private.
5773 If there is no enclosing parallel, do nothing. */
5775 void
5776 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5778 splay_tree_node n;
5780 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
5781 return;
/* Walk outward through the enclosing contexts (the loop tail is the
   do/while at the bottom), stopping at the first context that already
   gives DECL some flavor of private.  */
5785 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5786 if (n != NULL)
5788 if (n->value & GOVD_SHARED)
5789 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5790 else if (n->value & GOVD_MAP)
5791 n->value |= GOVD_MAP_TO_ONLY;
5792 else
5793 return;
5795 else if ((ctx->region_type & ORT_TARGET) != 0)
5797 if (ctx->target_map_scalars_firstprivate)
5798 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5799 else
5800 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5802 else if (ctx->region_type != ORT_WORKSHARE
5803 && ctx->region_type != ORT_SIMD
5804 && ctx->region_type != ORT_ACC
5805 && !(ctx->region_type & ORT_TARGET_DATA))
5806 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5808 ctx = ctx->outer_context;
5810 while (ctx);
5813 /* Similarly for each of the type sizes of TYPE. */
5815 static void
5816 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5818 if (type == NULL || type == error_mark_node)
5819 return;
5820 type = TYPE_MAIN_VARIANT (type);
/* PRIVATIZED_TYPES acts as a visited set, so recursive and shared
   types are processed only once.  */
5822 if (ctx->privatized_types->add (type))
5823 return;
5825 switch (TREE_CODE (type))
5827 case INTEGER_TYPE:
5828 case ENUMERAL_TYPE:
5829 case BOOLEAN_TYPE:
5830 case REAL_TYPE:
5831 case FIXED_POINT_TYPE:
5832 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5833 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5834 break;
5836 case ARRAY_TYPE:
/* Recurse into both the element type and the index domain, whose
   bounds may be variable for VLAs.  */
5837 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5838 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5839 break;
5841 case RECORD_TYPE:
5842 case UNION_TYPE:
5843 case QUAL_UNION_TYPE:
5845 tree field;
5846 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5847 if (TREE_CODE (field) == FIELD_DECL)
5849 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5850 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5853 break;
5855 case POINTER_TYPE:
5856 case REFERENCE_TYPE:
5857 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5858 break;
5860 default:
5861 break;
/* Whatever the shape of TYPE, its overall size expressions (and any
   language-specific extras) must be made available too.  */
5864 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5865 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5866 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5869 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
5871 static void
5872 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5874 splay_tree_node n;
5875 unsigned int nflags;
5876 tree t;
5878 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
5879 return;
5881 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5882 there are constructors involved somewhere. */
5883 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5884 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5885 flags |= GOVD_SEEN;
5887 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5888 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
5890 /* We shouldn't be re-adding the decl with the same data
5891 sharing class. */
5892 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5893 nflags = n->value | flags;
5894 /* The only combination of data sharing classes we should see is
5895 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
5896 reduction variables to be used in data sharing clauses. */
5897 gcc_assert ((ctx->region_type & ORT_ACC) != 0
5898 || ((nflags & GOVD_DATA_SHARE_CLASS)
5899 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
5900 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
5901 n->value = nflags;
5902 return;
5905 /* When adding a variable-sized variable, we have to handle all sorts
5906 of additional bits of data: the pointer replacement variable, and
5907 the parameters of the type. */
5908 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5910 /* Add the pointer replacement variable as PRIVATE if the variable
5911 replacement is private, else FIRSTPRIVATE since we'll need the
5912 address of the original variable either for SHARED, or for the
5913 copy into or out of the context. */
5914 if (!(flags & GOVD_LOCAL))
5916 if (flags & GOVD_MAP)
5917 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
5918 else if (flags & GOVD_PRIVATE)
5919 nflags = GOVD_PRIVATE;
5920 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
5921 && (flags & GOVD_FIRSTPRIVATE))
5922 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
5923 else
5924 nflags = GOVD_FIRSTPRIVATE;
5925 nflags |= flags & GOVD_SEEN;
/* The VLA's DECL_VALUE_EXPR is *ptr; recurse on the underlying
   pointer replacement variable.  */
5926 t = DECL_VALUE_EXPR (decl);
5927 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5928 t = TREE_OPERAND (t, 0);
5929 gcc_assert (DECL_P (t));
5930 omp_add_variable (ctx, t, nflags);
5933 /* Add all of the variable and type parameters (which should have
5934 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5935 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5936 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5937 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5939 /* The variable-sized variable itself is never SHARED, only some form
5940 of PRIVATE. The sharing would take place via the pointer variable
5941 which we remapped above. */
5942 if (flags & GOVD_SHARED)
5943 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5944 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5946 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5947 alloca statement we generate for the variable, so make sure it
5948 is available. This isn't automatically needed for the SHARED
5949 case, since we won't be allocating local storage then.
5950 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5951 in this case omp_notice_variable will be called later
5952 on when it is gimplified. */
5953 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
5954 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5955 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5957 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5958 && lang_hooks.decls.omp_privatize_by_reference (decl))
5960 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5962 /* Similar to the direct variable sized case above, we'll need the
5963 size of references being privatized. */
5964 if ((flags & GOVD_SHARED) == 0)
5966 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5967 if (DECL_P (t))
5968 omp_notice_variable (ctx, t, true);
/* Finally record FLAGS for DECL itself in this context.  */
5972 if (n != NULL)
5973 n->value |= flags;
5974 else
5975 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5978 /* Notice a threadprivate variable DECL used in OMP context CTX.
5979 This just prints out diagnostics about threadprivate variable uses
5980 in untied tasks. If DECL2 is non-NULL, prevent this warning
5981 on that variable. */
5983 static bool
5984 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5985 tree decl2)
5987 splay_tree_node n;
5988 struct gimplify_omp_ctx *octx;
/* Threadprivate variables are not usable in target regions; diagnose
   each enclosing target once, using a 0-valued splay-tree entry as the
   "already reported" marker.  */
5990 for (octx = ctx; octx; octx = octx->outer_context)
5991 if ((octx->region_type & ORT_TARGET) != 0)
5993 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5994 if (n == NULL)
5996 error ("threadprivate variable %qE used in target region",
5997 DECL_NAME (decl));
5998 error_at (octx->location, "enclosing target region");
5999 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6001 if (decl2)
6002 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
/* The untied-task diagnostic below only applies to the innermost
   context.  */
6005 if (ctx->region_type != ORT_UNTIED_TASK)
6006 return false;
6007 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6008 if (n == NULL)
6010 error ("threadprivate variable %qE used in untied task",
6011 DECL_NAME (decl));
6012 error_at (ctx->location, "enclosing task");
6013 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6015 if (decl2)
6016 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6017 return false;
6020 /* Return true if global var DECL is device resident. */
6022 static bool
6023 device_resident_p (tree decl)
6025 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6027 if (!attr)
6028 return false;
6030 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6032 tree c = TREE_VALUE (t);
6033 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6034 return true;
6037 return false;
6040 /* Determine outer default flags for DECL mentioned in an OMP region
6041 but not declared in an enclosing clause.
6043 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6044 remapped firstprivate instead of shared. To some extent this is
6045 addressed in omp_firstprivatize_type_sizes, but not
6046 effectively. */
6048 static unsigned
6049 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6050 bool in_code, unsigned flags)
6052 enum omp_clause_default_kind default_kind = ctx->default_kind;
6053 enum omp_clause_default_kind kind;
/* A language-predetermined sharing (e.g. const variables) overrides the
   region's default clause.  */
6055 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6056 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6057 default_kind = kind;
6059 switch (default_kind)
6061 case OMP_CLAUSE_DEFAULT_NONE:
6063 const char *rtype;
6065 if (ctx->region_type & ORT_PARALLEL)
6066 rtype = "parallel";
6067 else if (ctx->region_type & ORT_TASK)
6068 rtype = "task";
6069 else if (ctx->region_type & ORT_TEAMS)
6070 rtype = "teams";
6071 else
6072 gcc_unreachable ();
6074 error ("%qE not specified in enclosing %s",
6075 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
6076 error_at (ctx->location, "enclosing %s", rtype);
/* After diagnosing default(none), recover by treating DECL as
   shared.  */
6078 /* FALLTHRU */
6079 case OMP_CLAUSE_DEFAULT_SHARED:
6080 flags |= GOVD_SHARED;
6081 break;
6082 case OMP_CLAUSE_DEFAULT_PRIVATE:
6083 flags |= GOVD_PRIVATE;
6084 break;
6085 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6086 flags |= GOVD_FIRSTPRIVATE;
6087 break;
6088 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6089 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6090 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
/* Scan outer contexts: anything already private outside becomes
   firstprivate here; anything reaching an outer parallel/teams
   stays shared.  */
6091 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
6093 omp_notice_variable (octx, decl, in_code);
6094 for (; octx; octx = octx->outer_context)
6096 splay_tree_node n2;
6098 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
6099 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
6100 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
6101 continue;
6102 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6104 flags |= GOVD_FIRSTPRIVATE;
6105 goto found_outer;
6107 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
6109 flags |= GOVD_SHARED;
6110 goto found_outer;
6115 if (TREE_CODE (decl) == PARM_DECL
6116 || (!is_global_var (decl)
6117 && DECL_CONTEXT (decl) == current_function_decl))
6118 flags |= GOVD_FIRSTPRIVATE;
6119 else
6120 flags |= GOVD_SHARED;
6121 found_outer:
6122 break;
6124 default:
6125 gcc_unreachable ();
6128 return flags;
6132 /* Determine outer default flags for DECL mentioned in an OACC region
6133 but not declared in an enclosing clause. */
6135 static unsigned
6136 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6138 const char *rkind;
6139 bool on_device = false;
6140 tree type = TREE_TYPE (decl);
/* For by-reference privatization, classify by the referenced type.  */
6142 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6143 type = TREE_TYPE (type);
6145 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6146 && is_global_var (decl)
6147 && device_resident_p (decl))
6149 on_device = true;
6150 flags |= GOVD_MAP_TO_ONLY;
6153 switch (ctx->region_type)
6155 default:
6156 gcc_unreachable ();
6158 case ORT_ACC_KERNELS:
6159 /* Scalars are default 'copy' under kernels, non-scalars are default
6160 'present_or_copy'. */
6161 flags |= GOVD_MAP;
6162 if (!AGGREGATE_TYPE_P (type))
6163 flags |= GOVD_MAP_FORCE;
6165 rkind = "kernels";
6166 break;
6168 case ORT_ACC_PARALLEL:
6170 if (on_device || AGGREGATE_TYPE_P (type))
6171 /* Aggregates default to 'present_or_copy'. */
6172 flags |= GOVD_MAP;
6173 else
6174 /* Scalars default to 'firstprivate'. */
6175 flags |= GOVD_FIRSTPRIVATE;
6176 rkind = "parallel";
6178 break;
/* After classifying, diagnose a user-visible decl that had no explicit
   clause when default(none) is in effect.  */
6181 if (DECL_ARTIFICIAL (decl))
6182 ; /* We can get compiler-generated decls, and should not complain
6183 about them. */
6184 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
6186 error ("%qE not specified in enclosing OpenACC %qs construct",
6187 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
6188 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
6190 else
6191 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
6193 return flags;
6196 /* Record the fact that DECL was used within the OMP context CTX.
6197 IN_CODE is true when real code uses DECL, and false when we should
6198 merely emit default(none) errors. Return true if DECL is going to
6199 be remapped and thus DECL shouldn't be gimplified into its
6200 DECL_VALUE_EXPR (if any). */
6202 static bool
6203 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
/* N is DECL's splay-tree entry in CTX (if any); FLAGS accumulates the
   data-sharing bits we decide on for DECL in this context.  */
6205 splay_tree_node n;
6206 unsigned flags = in_code ? GOVD_SEEN : 0;
6207 bool ret = false, shared;
6209 if (error_operand_p (decl))
6210 return false;
/* Outside any OpenMP/OpenACC region there is nothing to record; just ask
   the frontend whether DECL's DECL_VALUE_EXPR should be disregarded.  */
6212 if (ctx->region_type == ORT_NONE)
6213 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
6215 if (is_global_var (decl))
6217 /* Threadprivate variables are predetermined. */
6218 if (DECL_THREAD_LOCAL_P (decl))
6219 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
6221 if (DECL_HAS_VALUE_EXPR_P (decl))
6223 tree value = get_base_address (DECL_VALUE_EXPR (decl));
6225 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
6226 return omp_notice_threadprivate_variable (ctx, decl, value);
/* Global used inside an OpenACC "routine" function (no enclosing
   gimplify context): it must carry an "omp declare target" attribute,
   and the "link" variant is not usable here; diagnose both cases.  */
6229 if (gimplify_omp_ctxp->outer_context == NULL
6230 && VAR_P (decl)
6231 && get_oacc_fn_attrib (current_function_decl))
6233 location_t loc = DECL_SOURCE_LOCATION (decl);
6235 if (lookup_attribute ("omp declare target link",
6236 DECL_ATTRIBUTES (decl)))
6238 error_at (loc,
6239 "%qE with %<link%> clause used in %<routine%> function",
6240 DECL_NAME (decl));
6241 return false;
6243 else if (!lookup_attribute ("omp declare target",
6244 DECL_ATTRIBUTES (decl)))
6246 error_at (loc,
6247 "%qE requires a %<declare%> directive for use "
6248 "in a %<routine%> function", DECL_NAME (decl));
6249 return false;
6254 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
/* Target (and OpenACC compute) regions: if DECL has no entry yet, decide
   an implicit mapping or firstprivatization for it.  */
6255 if ((ctx->region_type & ORT_TARGET) != 0
6257 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
6258 if (n == NULL)
6260 unsigned nflags = flags;
6261 if (ctx->target_map_pointers_as_0len_arrays
6262 || ctx->target_map_scalars_firstprivate)
6264 bool is_declare_target = false;
6265 bool is_scalar = false;
6266 if (is_global_var (decl)
6267 && varpool_node::get_create (decl)->offloadable)
/* An offloadable global counts as "declare target" only if no enclosing
   context already assigned it a non-shared data-sharing class.  */
6269 struct gimplify_omp_ctx *octx;
6270 for (octx = ctx->outer_context;
6271 octx; octx = octx->outer_context)
6273 n = splay_tree_lookup (octx->variables,
6274 (splay_tree_key)decl);
6275 if (n
6276 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
6277 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6278 break;
6280 is_declare_target = octx == NULL;
6282 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
/* Strip REFERENCE_TYPE and COMPLEX_TYPE wrappers; what remains decides
   whether DECL is a scalar for implicit firstprivatization.  */
6284 tree type = TREE_TYPE (decl);
6285 if (TREE_CODE (type) == REFERENCE_TYPE)
6286 type = TREE_TYPE (type);
6287 if (TREE_CODE (type) == COMPLEX_TYPE)
6288 type = TREE_TYPE (type);
6289 if (INTEGRAL_TYPE_P (type)
6290 || SCALAR_FLOAT_TYPE_P (type)
6291 || TREE_CODE (type) == POINTER_TYPE)
6292 is_scalar = true;
/* "declare target" variables need no extra flags; pointers become
   zero-length array sections, scalars become firstprivate.  */
6294 if (is_declare_target)
6296 else if (ctx->target_map_pointers_as_0len_arrays
6297 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
6298 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
6299 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
6300 == POINTER_TYPE)))
6301 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
6302 else if (is_scalar)
6303 nflags |= GOVD_FIRSTPRIVATE;
6306 struct gimplify_omp_ctx *octx = ctx->outer_context;
6307 if ((ctx->region_type & ORT_ACC) && octx)
6309 /* Look in outer OpenACC contexts, to see if there's a
6310 data attribute for this variable. */
6311 omp_notice_variable (octx, decl, in_code);
6313 for (; octx; octx = octx->outer_context)
6315 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
6316 break;
6317 splay_tree_node n2
6318 = splay_tree_lookup (octx->variables,
6319 (splay_tree_key) decl);
6320 if (n2)
6322 if (octx->region_type == ORT_ACC_HOST_DATA)
6323 error ("variable %qE declared in enclosing "
6324 "%<host_data%> region", DECL_NAME (decl));
6325 nflags |= GOVD_MAP;
6326 if (octx->region_type == ORT_ACC_DATA
6327 && (n2->value & GOVD_MAP_0LEN_ARRAY))
6328 nflags |= GOVD_MAP_0LEN_ARRAY;
6329 goto found_outer;
/* Nothing decided above (nflags still == flags): diagnose unmappable
   types, otherwise fall back to the region kind's default clause.  */
6335 tree type = TREE_TYPE (decl);
6337 if (nflags == flags
6338 && gimplify_omp_ctxp->target_firstprivatize_array_bases
6339 && lang_hooks.decls.omp_privatize_by_reference (decl))
6340 type = TREE_TYPE (type);
6341 if (nflags == flags
6342 && !lang_hooks.types.omp_mappable_type (type))
6344 error ("%qD referenced in target region does not have "
6345 "a mappable type", decl);
6346 nflags |= GOVD_MAP | GOVD_EXPLICIT;
6348 else if (nflags == flags)
6350 if ((ctx->region_type & ORT_ACC) != 0)
6351 nflags = oacc_default_clause (ctx, decl, flags);
6352 else
6353 nflags |= GOVD_MAP;
6356 found_outer:
6357 omp_add_variable (ctx, decl, nflags);
6359 else
6361 /* If nothing changed, there's nothing left to do. */
6362 if ((n->value & flags) == flags)
6363 return ret;
6364 flags |= n->value;
6365 n->value = flags;
6367 goto do_outer;
/* Non-target contexts: when DECL is not recorded yet, apply the region's
   default clause (workshare/simd/acc/target-data just defer outward).  */
6370 if (n == NULL)
6372 if (ctx->region_type == ORT_WORKSHARE
6373 || ctx->region_type == ORT_SIMD
6374 || ctx->region_type == ORT_ACC
6375 || (ctx->region_type & ORT_TARGET_DATA) != 0)
6376 goto do_outer;
6378 flags = omp_default_clause (ctx, decl, in_code, flags);
6380 if ((flags & GOVD_PRIVATE)
6381 && lang_hooks.decls.omp_private_outer_ref (decl))
6382 flags |= GOVD_PRIVATE_OUTER_REF;
6384 omp_add_variable (ctx, decl, flags);
6386 shared = (flags & GOVD_SHARED) != 0;
6387 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6388 goto do_outer;
/* First real (non-local) use of an already-recorded DECL: also mark the
   helpers behind variable-sized decls as seen -- the DECL_VALUE_EXPR base
   for VLA-like decls, or the size temporary of a by-reference type -- so
   they get remapped along with DECL.  */
6391 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
6392 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
6393 && DECL_SIZE (decl))
6395 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6397 splay_tree_node n2;
6398 tree t = DECL_VALUE_EXPR (decl);
6399 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6400 t = TREE_OPERAND (t, 0);
6401 gcc_assert (DECL_P (t));
6402 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6403 n2->value |= GOVD_SEEN;
6405 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
6406 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
6407 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
6408 != INTEGER_CST))
6410 splay_tree_node n2;
6411 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6412 gcc_assert (DECL_P (t));
6413 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6414 if (n2)
6415 n2->value |= GOVD_SEEN;
6419 shared = ((flags | n->value) & GOVD_SHARED) != 0;
6420 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6422 /* If nothing changed, there's nothing left to do. */
6423 if ((n->value & flags) == flags)
6424 return ret;
6425 flags |= n->value;
6426 n->value = flags;
6428 do_outer:
6429 /* If the variable is private in the current context, then we don't
6430 need to propagate anything to an outer context. */
6431 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
6432 return ret;
6433 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6434 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6435 return ret;
6436 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6437 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6438 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6439 return ret;
/* Otherwise DECL must also be noticed in the enclosing context.  */
6440 if (ctx->outer_context
6441 && omp_notice_variable (ctx->outer_context, decl, in_code))
6442 return true;
6443 return ret;
6446 /* Verify that DECL is private within CTX. If there's specific information
6447 to the contrary in the innermost scope, generate an error. */
6449 static bool
6450 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
/* SIMD distinguishes looping constructs: 0 for non-simd loops, non-zero
   for simd-ish ones; the specific values 1 and 2 select different
   diagnostics below.  NOTE(review): the exact caller-side meaning of
   1 vs 2 is not visible in this excerpt -- confirm against callers.  */
6452 splay_tree_node n;
6454 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6455 if (n != NULL)
/* DECL is recorded in this context.  A shared data-sharing class
   contradicts the predetermined private/linear iteration variable;
   diagnose in the innermost context and force it private as error
   recovery.  */
6457 if (n->value & GOVD_SHARED)
6459 if (ctx == gimplify_omp_ctxp)
6461 if (simd)
6462 error ("iteration variable %qE is predetermined linear",
6463 DECL_NAME (decl));
6464 else
6465 error ("iteration variable %qE should be private",
6466 DECL_NAME (decl));
6467 n->value = GOVD_PRIVATE;
6468 return true;
6470 else
6471 return false;
/* Explicit clauses on the innermost construct (or on a combined parallel
   that directly encloses it) must not conflict with the predetermined
   sharing of the iteration variable either.  */
6473 else if ((n->value & GOVD_EXPLICIT) != 0
6474 && (ctx == gimplify_omp_ctxp
6475 || (ctx->region_type == ORT_COMBINED_PARALLEL
6476 && gimplify_omp_ctxp->outer_context == ctx)))
6478 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
6479 error ("iteration variable %qE should not be firstprivate",
6480 DECL_NAME (decl));
6481 else if ((n->value & GOVD_REDUCTION) != 0)
6482 error ("iteration variable %qE should not be reduction",
6483 DECL_NAME (decl));
6484 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
6485 error ("iteration variable %qE should not be linear",
6486 DECL_NAME (decl));
6487 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
6488 error ("iteration variable %qE should not be lastprivate",
6489 DECL_NAME (decl));
6490 else if (simd && (n->value & GOVD_PRIVATE) != 0)
6491 error ("iteration variable %qE should not be private",
6492 DECL_NAME (decl));
6493 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
6494 error ("iteration variable %qE is predetermined linear",
6495 DECL_NAME (decl));
/* Report "is private" only when CTX is the innermost context or a
   combined parallel directly enclosing it.  */
6497 return (ctx == gimplify_omp_ctxp
6498 || (ctx->region_type == ORT_COMBINED_PARALLEL
6499 && gimplify_omp_ctxp->outer_context == ctx));
/* Not recorded here: recurse outward through workshare/simd/OpenACC
   regions; any other region kind ends the search.  */
6502 if (ctx->region_type != ORT_WORKSHARE
6503 && ctx->region_type != ORT_SIMD
6504 && ctx->region_type != ORT_ACC)
6505 return false;
6506 else if (ctx->outer_context)
6507 return omp_is_private (ctx->outer_context, decl, simd);
6508 return false;
6511 /* Return true if DECL is private within a parallel region
6512 that binds to the current construct's context or in parallel
6513 region's REDUCTION clause. */
6515 static bool
6516 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
6518 splay_tree_node n;
/* Walk outward through the enclosing contexts looking for a binding of
   DECL.  NOTE(review): this body is a do/while loop; the "do" line was
   elided by the blob viewer along with other brace-only lines.  */
6522 ctx = ctx->outer_context;
6523 if (ctx == NULL)
/* Ran out of contexts.  Globals are presumed shared; locals are presumed
   private, with the reference/member-dummy caveats below.  */
6525 if (is_global_var (decl))
6526 return false;
6528 /* References might be private, but might be shared too,
6529 when checking for copyprivate, assume they might be
6530 private, otherwise assume they might be shared. */
6531 if (copyprivate)
6532 return true;
6534 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6535 return false;
6537 /* Treat C++ privatized non-static data members outside
6538 of the privatization the same. */
6539 if (omp_member_access_dummy_var (decl))
6540 return false;
6542 return true;
6545 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
/* In target regions an entry without a data-sharing class decides
   nothing; keep walking outward.  */
6547 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6548 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
6549 continue;
6551 if (n != NULL)
/* Found a binding: DECL is private here unless it is marked shared
   (member-access dummies recorded as LOCAL don't count).  */
6553 if ((n->value & GOVD_LOCAL) != 0
6554 && omp_member_access_dummy_var (decl))
6555 return false;
6556 return (n->value & GOVD_SHARED) == 0;
/* Only look through constructs that don't establish their own binding
   region for privacy purposes.  */
6559 while (ctx->region_type == ORT_WORKSHARE
6560 || ctx->region_type == ORT_SIMD
6561 || ctx->region_type == ORT_ACC);
6562 return false;
6565 /* Return true if the CTX is combined with distribute and thus
6566 lastprivate can't be supported. */
6568 static bool
6569 omp_no_lastprivate (struct gimplify_omp_ctx *ctx)
6573 if (ctx->outer_context == NULL)
6574 return false;
6575 ctx = ctx->outer_context;
6576 switch (ctx->region_type)
6578 case ORT_WORKSHARE:
6579 if (!ctx->combined_loop)
6580 return false;
6581 if (ctx->distribute)
6582 return lang_GNU_Fortran ();
6583 break;
6584 case ORT_COMBINED_PARALLEL:
6585 break;
6586 case ORT_COMBINED_TEAMS:
6587 return lang_GNU_Fortran ();
6588 default:
6589 return false;
6592 while (1);
6595 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
6597 static tree
6598 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
6600 tree t = *tp;
6602 /* If this node has been visited, unmark it and keep looking. */
6603 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
6604 return t;
6606 if (IS_TYPE_OR_DECL_P (t))
6607 *walk_subtrees = 0;
6608 return NULL_TREE;
6611 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
6612 and previous omp contexts. */
6614 static void
6615 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
6616 enum omp_region_type region_type,
6617 enum tree_code code)
6619 struct gimplify_omp_ctx *ctx, *outer_ctx;
6620 tree c;
6621 hash_map<tree, tree> *struct_map_to_clause = NULL;
6622 tree *prev_list_p = NULL;
6624 ctx = new_omp_context (region_type);
6625 outer_ctx = ctx->outer_context;
6626 if (code == OMP_TARGET && !lang_GNU_Fortran ())
6628 ctx->target_map_pointers_as_0len_arrays = true;
6629 /* FIXME: For Fortran we want to set this too, when
6630 the Fortran FE is updated to OpenMP 4.5. */
6631 ctx->target_map_scalars_firstprivate = true;
6633 if (!lang_GNU_Fortran ())
6634 switch (code)
6636 case OMP_TARGET:
6637 case OMP_TARGET_DATA:
6638 case OMP_TARGET_ENTER_DATA:
6639 case OMP_TARGET_EXIT_DATA:
6640 case OACC_HOST_DATA:
6641 ctx->target_firstprivatize_array_bases = true;
6642 default:
6643 break;
6646 while ((c = *list_p) != NULL)
6648 bool remove = false;
6649 bool notice_outer = true;
6650 const char *check_non_private = NULL;
6651 unsigned int flags;
6652 tree decl;
6654 switch (OMP_CLAUSE_CODE (c))
6656 case OMP_CLAUSE_PRIVATE:
6657 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
6658 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6660 flags |= GOVD_PRIVATE_OUTER_REF;
6661 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6663 else
6664 notice_outer = false;
6665 goto do_add;
6666 case OMP_CLAUSE_SHARED:
6667 flags = GOVD_SHARED | GOVD_EXPLICIT;
6668 goto do_add;
6669 case OMP_CLAUSE_FIRSTPRIVATE:
6670 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
6671 check_non_private = "firstprivate";
6672 goto do_add;
6673 case OMP_CLAUSE_LASTPRIVATE:
6674 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
6675 check_non_private = "lastprivate";
6676 decl = OMP_CLAUSE_DECL (c);
6677 if (omp_no_lastprivate (ctx))
6679 notice_outer = false;
6680 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6682 else if (error_operand_p (decl))
6683 goto do_add;
6684 else if (outer_ctx
6685 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
6686 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
6687 && splay_tree_lookup (outer_ctx->variables,
6688 (splay_tree_key) decl) == NULL)
6690 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
6691 if (outer_ctx->outer_context)
6692 omp_notice_variable (outer_ctx->outer_context, decl, true);
6694 else if (outer_ctx
6695 && (outer_ctx->region_type & ORT_TASK) != 0
6696 && outer_ctx->combined_loop
6697 && splay_tree_lookup (outer_ctx->variables,
6698 (splay_tree_key) decl) == NULL)
6700 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6701 if (outer_ctx->outer_context)
6702 omp_notice_variable (outer_ctx->outer_context, decl, true);
6704 else if (outer_ctx
6705 && (outer_ctx->region_type == ORT_WORKSHARE
6706 || outer_ctx->region_type == ORT_ACC)
6707 && outer_ctx->combined_loop
6708 && splay_tree_lookup (outer_ctx->variables,
6709 (splay_tree_key) decl) == NULL
6710 && !omp_check_private (outer_ctx, decl, false))
6712 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6713 if (outer_ctx->outer_context
6714 && (outer_ctx->outer_context->region_type
6715 == ORT_COMBINED_PARALLEL)
6716 && splay_tree_lookup (outer_ctx->outer_context->variables,
6717 (splay_tree_key) decl) == NULL)
6719 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
6720 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
6721 if (octx->outer_context)
6722 omp_notice_variable (octx->outer_context, decl, true);
6724 else if (outer_ctx->outer_context)
6725 omp_notice_variable (outer_ctx->outer_context, decl, true);
6727 goto do_add;
6728 case OMP_CLAUSE_REDUCTION:
6729 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
6730 /* OpenACC permits reductions on private variables. */
6731 if (!(region_type & ORT_ACC))
6732 check_non_private = "reduction";
6733 decl = OMP_CLAUSE_DECL (c);
6734 if (TREE_CODE (decl) == MEM_REF)
6736 tree type = TREE_TYPE (decl);
6737 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
6738 NULL, is_gimple_val, fb_rvalue, false)
6739 == GS_ERROR)
6741 remove = true;
6742 break;
6744 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6745 if (DECL_P (v))
6747 omp_firstprivatize_variable (ctx, v);
6748 omp_notice_variable (ctx, v, true);
6750 decl = TREE_OPERAND (decl, 0);
6751 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
6753 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
6754 NULL, is_gimple_val, fb_rvalue, false)
6755 == GS_ERROR)
6757 remove = true;
6758 break;
6760 v = TREE_OPERAND (decl, 1);
6761 if (DECL_P (v))
6763 omp_firstprivatize_variable (ctx, v);
6764 omp_notice_variable (ctx, v, true);
6766 decl = TREE_OPERAND (decl, 0);
6768 if (TREE_CODE (decl) == ADDR_EXPR
6769 || TREE_CODE (decl) == INDIRECT_REF)
6770 decl = TREE_OPERAND (decl, 0);
6772 goto do_add_decl;
6773 case OMP_CLAUSE_LINEAR:
6774 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
6775 is_gimple_val, fb_rvalue) == GS_ERROR)
6777 remove = true;
6778 break;
6780 else
6782 if (code == OMP_SIMD
6783 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6785 struct gimplify_omp_ctx *octx = outer_ctx;
6786 if (octx
6787 && octx->region_type == ORT_WORKSHARE
6788 && octx->combined_loop
6789 && !octx->distribute)
6791 if (octx->outer_context
6792 && (octx->outer_context->region_type
6793 == ORT_COMBINED_PARALLEL))
6794 octx = octx->outer_context->outer_context;
6795 else
6796 octx = octx->outer_context;
6798 if (octx
6799 && octx->region_type == ORT_WORKSHARE
6800 && octx->combined_loop
6801 && octx->distribute
6802 && !lang_GNU_Fortran ())
6804 error_at (OMP_CLAUSE_LOCATION (c),
6805 "%<linear%> clause for variable other than "
6806 "loop iterator specified on construct "
6807 "combined with %<distribute%>");
6808 remove = true;
6809 break;
6812 /* For combined #pragma omp parallel for simd, need to put
6813 lastprivate and perhaps firstprivate too on the
6814 parallel. Similarly for #pragma omp for simd. */
6815 struct gimplify_omp_ctx *octx = outer_ctx;
6816 decl = NULL_TREE;
6817 if (omp_no_lastprivate (ctx))
6818 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6821 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6822 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6823 break;
6824 decl = OMP_CLAUSE_DECL (c);
6825 if (error_operand_p (decl))
6827 decl = NULL_TREE;
6828 break;
6830 flags = GOVD_SEEN;
6831 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6832 flags |= GOVD_FIRSTPRIVATE;
6833 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6834 flags |= GOVD_LASTPRIVATE;
6835 if (octx
6836 && octx->region_type == ORT_WORKSHARE
6837 && octx->combined_loop)
6839 if (octx->outer_context
6840 && (octx->outer_context->region_type
6841 == ORT_COMBINED_PARALLEL))
6842 octx = octx->outer_context;
6843 else if (omp_check_private (octx, decl, false))
6844 break;
6846 else if (octx
6847 && (octx->region_type & ORT_TASK) != 0
6848 && octx->combined_loop)
6850 else if (octx
6851 && octx->region_type == ORT_COMBINED_PARALLEL
6852 && ctx->region_type == ORT_WORKSHARE
6853 && octx == outer_ctx)
6854 flags = GOVD_SEEN | GOVD_SHARED;
6855 else if (octx
6856 && octx->region_type == ORT_COMBINED_TEAMS)
6857 flags = GOVD_SEEN | GOVD_SHARED;
6858 else if (octx
6859 && octx->region_type == ORT_COMBINED_TARGET)
6861 flags &= ~GOVD_LASTPRIVATE;
6862 if (flags == GOVD_SEEN)
6863 break;
6865 else
6866 break;
6867 splay_tree_node on
6868 = splay_tree_lookup (octx->variables,
6869 (splay_tree_key) decl);
6870 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
6872 octx = NULL;
6873 break;
6875 omp_add_variable (octx, decl, flags);
6876 if (octx->outer_context == NULL)
6877 break;
6878 octx = octx->outer_context;
6880 while (1);
6881 if (octx
6882 && decl
6883 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6884 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6885 omp_notice_variable (octx, decl, true);
6887 flags = GOVD_LINEAR | GOVD_EXPLICIT;
6888 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6889 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6891 notice_outer = false;
6892 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6894 goto do_add;
6896 case OMP_CLAUSE_MAP:
6897 decl = OMP_CLAUSE_DECL (c);
6898 if (error_operand_p (decl))
6899 remove = true;
6900 switch (code)
6902 case OMP_TARGET:
6903 break;
6904 case OACC_DATA:
6905 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
6906 break;
6907 case OMP_TARGET_DATA:
6908 case OMP_TARGET_ENTER_DATA:
6909 case OMP_TARGET_EXIT_DATA:
6910 case OACC_ENTER_DATA:
6911 case OACC_EXIT_DATA:
6912 case OACC_HOST_DATA:
6913 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6914 || (OMP_CLAUSE_MAP_KIND (c)
6915 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6916 /* For target {,enter ,exit }data only the array slice is
6917 mapped, but not the pointer to it. */
6918 remove = true;
6919 break;
6920 default:
6921 break;
6923 if (remove)
6924 break;
6925 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
6927 struct gimplify_omp_ctx *octx;
6928 for (octx = outer_ctx; octx; octx = octx->outer_context)
6930 if (octx->region_type != ORT_ACC_HOST_DATA)
6931 break;
6932 splay_tree_node n2
6933 = splay_tree_lookup (octx->variables,
6934 (splay_tree_key) decl);
6935 if (n2)
6936 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
6937 "declared in enclosing %<host_data%> region",
6938 DECL_NAME (decl));
6941 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6942 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6943 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6944 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6945 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6947 remove = true;
6948 break;
6950 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6951 || (OMP_CLAUSE_MAP_KIND (c)
6952 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6953 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
6955 OMP_CLAUSE_SIZE (c)
6956 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
6957 false);
6958 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
6959 GOVD_FIRSTPRIVATE | GOVD_SEEN);
6961 if (!DECL_P (decl))
6963 tree d = decl, *pd;
6964 if (TREE_CODE (d) == ARRAY_REF)
6966 while (TREE_CODE (d) == ARRAY_REF)
6967 d = TREE_OPERAND (d, 0);
6968 if (TREE_CODE (d) == COMPONENT_REF
6969 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
6970 decl = d;
6972 pd = &OMP_CLAUSE_DECL (c);
6973 if (d == decl
6974 && TREE_CODE (decl) == INDIRECT_REF
6975 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
6976 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
6977 == REFERENCE_TYPE))
6979 pd = &TREE_OPERAND (decl, 0);
6980 decl = TREE_OPERAND (decl, 0);
6982 if (TREE_CODE (decl) == COMPONENT_REF)
6984 while (TREE_CODE (decl) == COMPONENT_REF)
6985 decl = TREE_OPERAND (decl, 0);
6986 if (TREE_CODE (decl) == INDIRECT_REF
6987 && DECL_P (TREE_OPERAND (decl, 0))
6988 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
6989 == REFERENCE_TYPE))
6990 decl = TREE_OPERAND (decl, 0);
6992 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
6993 == GS_ERROR)
6995 remove = true;
6996 break;
6998 if (DECL_P (decl))
7000 if (error_operand_p (decl))
7002 remove = true;
7003 break;
7006 tree stype = TREE_TYPE (decl);
7007 if (TREE_CODE (stype) == REFERENCE_TYPE)
7008 stype = TREE_TYPE (stype);
7009 if (TYPE_SIZE_UNIT (stype) == NULL
7010 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
7012 error_at (OMP_CLAUSE_LOCATION (c),
7013 "mapping field %qE of variable length "
7014 "structure", OMP_CLAUSE_DECL (c));
7015 remove = true;
7016 break;
7019 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7021 /* Error recovery. */
7022 if (prev_list_p == NULL)
7024 remove = true;
7025 break;
7027 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7029 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7030 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7032 remove = true;
7033 break;
7038 tree offset;
7039 HOST_WIDE_INT bitsize, bitpos;
7040 machine_mode mode;
7041 int unsignedp, reversep, volatilep = 0;
7042 tree base = OMP_CLAUSE_DECL (c);
7043 while (TREE_CODE (base) == ARRAY_REF)
7044 base = TREE_OPERAND (base, 0);
7045 if (TREE_CODE (base) == INDIRECT_REF)
7046 base = TREE_OPERAND (base, 0);
7047 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7048 &mode, &unsignedp, &reversep,
7049 &volatilep, false);
7050 tree orig_base = base;
7051 if ((TREE_CODE (base) == INDIRECT_REF
7052 || (TREE_CODE (base) == MEM_REF
7053 && integer_zerop (TREE_OPERAND (base, 1))))
7054 && DECL_P (TREE_OPERAND (base, 0))
7055 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7056 == REFERENCE_TYPE))
7057 base = TREE_OPERAND (base, 0);
7058 gcc_assert (base == decl
7059 && (offset == NULL_TREE
7060 || TREE_CODE (offset) == INTEGER_CST));
7062 splay_tree_node n
7063 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7064 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7065 == GOMP_MAP_ALWAYS_POINTER);
7066 if (n == NULL || (n->value & GOVD_MAP) == 0)
7068 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7069 OMP_CLAUSE_MAP);
7070 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7071 if (orig_base != base)
7072 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7073 else
7074 OMP_CLAUSE_DECL (l) = decl;
7075 OMP_CLAUSE_SIZE (l) = size_int (1);
7076 if (struct_map_to_clause == NULL)
7077 struct_map_to_clause = new hash_map<tree, tree>;
7078 struct_map_to_clause->put (decl, l);
7079 if (ptr)
7081 enum gomp_map_kind mkind
7082 = code == OMP_TARGET_EXIT_DATA
7083 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7084 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7085 OMP_CLAUSE_MAP);
7086 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7087 OMP_CLAUSE_DECL (c2)
7088 = unshare_expr (OMP_CLAUSE_DECL (c));
7089 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7090 OMP_CLAUSE_SIZE (c2)
7091 = TYPE_SIZE_UNIT (ptr_type_node);
7092 OMP_CLAUSE_CHAIN (l) = c2;
7093 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7095 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7096 tree c3
7097 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7098 OMP_CLAUSE_MAP);
7099 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7100 OMP_CLAUSE_DECL (c3)
7101 = unshare_expr (OMP_CLAUSE_DECL (c4));
7102 OMP_CLAUSE_SIZE (c3)
7103 = TYPE_SIZE_UNIT (ptr_type_node);
7104 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7105 OMP_CLAUSE_CHAIN (c2) = c3;
7107 *prev_list_p = l;
7108 prev_list_p = NULL;
7110 else
7112 OMP_CLAUSE_CHAIN (l) = c;
7113 *list_p = l;
7114 list_p = &OMP_CLAUSE_CHAIN (l);
7116 if (orig_base != base && code == OMP_TARGET)
7118 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7119 OMP_CLAUSE_MAP);
7120 enum gomp_map_kind mkind
7121 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7122 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7123 OMP_CLAUSE_DECL (c2) = decl;
7124 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7125 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7126 OMP_CLAUSE_CHAIN (l) = c2;
7128 flags = GOVD_MAP | GOVD_EXPLICIT;
7129 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7130 flags |= GOVD_SEEN;
7131 goto do_add_decl;
7133 else
7135 tree *osc = struct_map_to_clause->get (decl);
7136 tree *sc = NULL, *scp = NULL;
7137 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7138 n->value |= GOVD_SEEN;
7139 offset_int o1, o2;
7140 if (offset)
7141 o1 = wi::to_offset (offset);
7142 else
7143 o1 = 0;
7144 if (bitpos)
7145 o1 = o1 + bitpos / BITS_PER_UNIT;
7146 sc = &OMP_CLAUSE_CHAIN (*osc);
7147 if (*sc != c
7148 && (OMP_CLAUSE_MAP_KIND (*sc)
7149 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7150 sc = &OMP_CLAUSE_CHAIN (*sc);
7151 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
7152 if (ptr && sc == prev_list_p)
7153 break;
7154 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7155 != COMPONENT_REF
7156 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7157 != INDIRECT_REF)
7158 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7159 != ARRAY_REF))
7160 break;
7161 else
7163 tree offset2;
7164 HOST_WIDE_INT bitsize2, bitpos2;
7165 base = OMP_CLAUSE_DECL (*sc);
7166 if (TREE_CODE (base) == ARRAY_REF)
7168 while (TREE_CODE (base) == ARRAY_REF)
7169 base = TREE_OPERAND (base, 0);
7170 if (TREE_CODE (base) != COMPONENT_REF
7171 || (TREE_CODE (TREE_TYPE (base))
7172 != ARRAY_TYPE))
7173 break;
7175 else if (TREE_CODE (base) == INDIRECT_REF
7176 && (TREE_CODE (TREE_OPERAND (base, 0))
7177 == COMPONENT_REF)
7178 && (TREE_CODE (TREE_TYPE
7179 (TREE_OPERAND (base, 0)))
7180 == REFERENCE_TYPE))
7181 base = TREE_OPERAND (base, 0);
7182 base = get_inner_reference (base, &bitsize2,
7183 &bitpos2, &offset2,
7184 &mode, &unsignedp,
7185 &reversep, &volatilep,
7186 false);
7187 if ((TREE_CODE (base) == INDIRECT_REF
7188 || (TREE_CODE (base) == MEM_REF
7189 && integer_zerop (TREE_OPERAND (base,
7190 1))))
7191 && DECL_P (TREE_OPERAND (base, 0))
7192 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
7193 0)))
7194 == REFERENCE_TYPE))
7195 base = TREE_OPERAND (base, 0);
7196 if (base != decl)
7197 break;
7198 if (scp)
7199 continue;
7200 gcc_assert (offset == NULL_TREE
7201 || TREE_CODE (offset) == INTEGER_CST);
7202 tree d1 = OMP_CLAUSE_DECL (*sc);
7203 tree d2 = OMP_CLAUSE_DECL (c);
7204 while (TREE_CODE (d1) == ARRAY_REF)
7205 d1 = TREE_OPERAND (d1, 0);
7206 while (TREE_CODE (d2) == ARRAY_REF)
7207 d2 = TREE_OPERAND (d2, 0);
7208 if (TREE_CODE (d1) == INDIRECT_REF)
7209 d1 = TREE_OPERAND (d1, 0);
7210 if (TREE_CODE (d2) == INDIRECT_REF)
7211 d2 = TREE_OPERAND (d2, 0);
7212 while (TREE_CODE (d1) == COMPONENT_REF)
7213 if (TREE_CODE (d2) == COMPONENT_REF
7214 && TREE_OPERAND (d1, 1)
7215 == TREE_OPERAND (d2, 1))
7217 d1 = TREE_OPERAND (d1, 0);
7218 d2 = TREE_OPERAND (d2, 0);
7220 else
7221 break;
7222 if (d1 == d2)
7224 error_at (OMP_CLAUSE_LOCATION (c),
7225 "%qE appears more than once in map "
7226 "clauses", OMP_CLAUSE_DECL (c));
7227 remove = true;
7228 break;
7230 if (offset2)
7231 o2 = wi::to_offset (offset2);
7232 else
7233 o2 = 0;
7234 if (bitpos2)
7235 o2 = o2 + bitpos2 / BITS_PER_UNIT;
7236 if (wi::ltu_p (o1, o2)
7237 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
7239 if (ptr)
7240 scp = sc;
7241 else
7242 break;
7245 if (remove)
7246 break;
7247 OMP_CLAUSE_SIZE (*osc)
7248 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
7249 size_one_node);
7250 if (ptr)
7252 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7253 OMP_CLAUSE_MAP);
7254 tree cl = NULL_TREE;
7255 enum gomp_map_kind mkind
7256 = code == OMP_TARGET_EXIT_DATA
7257 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7258 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7259 OMP_CLAUSE_DECL (c2)
7260 = unshare_expr (OMP_CLAUSE_DECL (c));
7261 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
7262 OMP_CLAUSE_SIZE (c2)
7263 = TYPE_SIZE_UNIT (ptr_type_node);
7264 cl = scp ? *prev_list_p : c2;
7265 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7267 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7268 tree c3
7269 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7270 OMP_CLAUSE_MAP);
7271 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7272 OMP_CLAUSE_DECL (c3)
7273 = unshare_expr (OMP_CLAUSE_DECL (c4));
7274 OMP_CLAUSE_SIZE (c3)
7275 = TYPE_SIZE_UNIT (ptr_type_node);
7276 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7277 if (!scp)
7278 OMP_CLAUSE_CHAIN (c2) = c3;
7279 else
7280 cl = c3;
7282 if (scp)
7283 *scp = c2;
7284 if (sc == prev_list_p)
7286 *sc = cl;
7287 prev_list_p = NULL;
7289 else
7291 *prev_list_p = OMP_CLAUSE_CHAIN (c);
7292 list_p = prev_list_p;
7293 prev_list_p = NULL;
7294 OMP_CLAUSE_CHAIN (c) = *sc;
7295 *sc = cl;
7296 continue;
7299 else if (*sc != c)
7301 *list_p = OMP_CLAUSE_CHAIN (c);
7302 OMP_CLAUSE_CHAIN (c) = *sc;
7303 *sc = c;
7304 continue;
7308 if (!remove
7309 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
7310 && OMP_CLAUSE_CHAIN (c)
7311 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
7312 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
7313 == GOMP_MAP_ALWAYS_POINTER))
7314 prev_list_p = list_p;
7315 break;
7317 flags = GOVD_MAP | GOVD_EXPLICIT;
7318 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
7319 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
7320 flags |= GOVD_MAP_ALWAYS_TO;
7321 goto do_add;
7323 case OMP_CLAUSE_DEPEND:
7324 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
7325 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
7327 /* Nothing to do. OMP_CLAUSE_DECL will be lowered in
7328 omp-low.c. */
7329 break;
7331 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
7333 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
7334 NULL, is_gimple_val, fb_rvalue);
7335 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
7337 if (error_operand_p (OMP_CLAUSE_DECL (c)))
7339 remove = true;
7340 break;
7342 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
7343 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
7344 is_gimple_val, fb_rvalue) == GS_ERROR)
7346 remove = true;
7347 break;
7349 break;
7351 case OMP_CLAUSE_TO:
7352 case OMP_CLAUSE_FROM:
7353 case OMP_CLAUSE__CACHE_:
7354 decl = OMP_CLAUSE_DECL (c);
7355 if (error_operand_p (decl))
7357 remove = true;
7358 break;
7360 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7361 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7362 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7363 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7364 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7366 remove = true;
7367 break;
7369 if (!DECL_P (decl))
7371 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
7372 NULL, is_gimple_lvalue, fb_lvalue)
7373 == GS_ERROR)
7375 remove = true;
7376 break;
7378 break;
7380 goto do_notice;
7382 case OMP_CLAUSE_USE_DEVICE_PTR:
7383 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7384 goto do_add;
7385 case OMP_CLAUSE_IS_DEVICE_PTR:
7386 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7387 goto do_add;
7389 do_add:
7390 decl = OMP_CLAUSE_DECL (c);
7391 do_add_decl:
7392 if (error_operand_p (decl))
7394 remove = true;
7395 break;
7397 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
7399 tree t = omp_member_access_dummy_var (decl);
7400 if (t)
7402 tree v = DECL_VALUE_EXPR (decl);
7403 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
7404 if (outer_ctx)
7405 omp_notice_variable (outer_ctx, t, true);
7408 if (code == OACC_DATA
7409 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7410 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
7411 flags |= GOVD_MAP_0LEN_ARRAY;
7412 omp_add_variable (ctx, decl, flags);
7413 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7414 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7416 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
7417 GOVD_LOCAL | GOVD_SEEN);
7418 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
7419 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
7420 find_decl_expr,
7421 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7422 NULL) == NULL_TREE)
7423 omp_add_variable (ctx,
7424 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7425 GOVD_LOCAL | GOVD_SEEN);
7426 gimplify_omp_ctxp = ctx;
7427 push_gimplify_context ();
7429 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
7430 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7432 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
7433 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
7434 pop_gimplify_context
7435 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
7436 push_gimplify_context ();
7437 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
7438 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7439 pop_gimplify_context
7440 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
7441 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
7442 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
7444 gimplify_omp_ctxp = outer_ctx;
7446 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7447 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
7449 gimplify_omp_ctxp = ctx;
7450 push_gimplify_context ();
7451 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
7453 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7454 NULL, NULL);
7455 TREE_SIDE_EFFECTS (bind) = 1;
7456 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
7457 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
7459 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
7460 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7461 pop_gimplify_context
7462 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
7463 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
7465 gimplify_omp_ctxp = outer_ctx;
7467 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7468 && OMP_CLAUSE_LINEAR_STMT (c))
7470 gimplify_omp_ctxp = ctx;
7471 push_gimplify_context ();
7472 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
7474 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7475 NULL, NULL);
7476 TREE_SIDE_EFFECTS (bind) = 1;
7477 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
7478 OMP_CLAUSE_LINEAR_STMT (c) = bind;
7480 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
7481 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7482 pop_gimplify_context
7483 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
7484 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
7486 gimplify_omp_ctxp = outer_ctx;
7488 if (notice_outer)
7489 goto do_notice;
7490 break;
7492 case OMP_CLAUSE_COPYIN:
7493 case OMP_CLAUSE_COPYPRIVATE:
7494 decl = OMP_CLAUSE_DECL (c);
7495 if (error_operand_p (decl))
7497 remove = true;
7498 break;
7500 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
7501 && !remove
7502 && !omp_check_private (ctx, decl, true))
7504 remove = true;
7505 if (is_global_var (decl))
7507 if (DECL_THREAD_LOCAL_P (decl))
7508 remove = false;
7509 else if (DECL_HAS_VALUE_EXPR_P (decl))
7511 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7513 if (value
7514 && DECL_P (value)
7515 && DECL_THREAD_LOCAL_P (value))
7516 remove = false;
7519 if (remove)
7520 error_at (OMP_CLAUSE_LOCATION (c),
7521 "copyprivate variable %qE is not threadprivate"
7522 " or private in outer context", DECL_NAME (decl));
7524 do_notice:
7525 if (outer_ctx)
7526 omp_notice_variable (outer_ctx, decl, true);
7527 if (check_non_private
7528 && region_type == ORT_WORKSHARE
7529 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7530 || decl == OMP_CLAUSE_DECL (c)
7531 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
7532 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7533 == ADDR_EXPR
7534 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7535 == POINTER_PLUS_EXPR
7536 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
7537 (OMP_CLAUSE_DECL (c), 0), 0))
7538 == ADDR_EXPR)))))
7539 && omp_check_private (ctx, decl, false))
7541 error ("%s variable %qE is private in outer context",
7542 check_non_private, DECL_NAME (decl));
7543 remove = true;
7545 break;
7547 case OMP_CLAUSE_IF:
7548 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
7549 && OMP_CLAUSE_IF_MODIFIER (c) != code)
7551 const char *p[2];
7552 for (int i = 0; i < 2; i++)
7553 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
7555 case OMP_PARALLEL: p[i] = "parallel"; break;
7556 case OMP_TASK: p[i] = "task"; break;
7557 case OMP_TASKLOOP: p[i] = "taskloop"; break;
7558 case OMP_TARGET_DATA: p[i] = "target data"; break;
7559 case OMP_TARGET: p[i] = "target"; break;
7560 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
7561 case OMP_TARGET_ENTER_DATA:
7562 p[i] = "target enter data"; break;
7563 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
7564 default: gcc_unreachable ();
7566 error_at (OMP_CLAUSE_LOCATION (c),
7567 "expected %qs %<if%> clause modifier rather than %qs",
7568 p[0], p[1]);
7569 remove = true;
7571 /* Fall through. */
7573 case OMP_CLAUSE_FINAL:
7574 OMP_CLAUSE_OPERAND (c, 0)
7575 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
7576 /* Fall through. */
7578 case OMP_CLAUSE_SCHEDULE:
7579 case OMP_CLAUSE_NUM_THREADS:
7580 case OMP_CLAUSE_NUM_TEAMS:
7581 case OMP_CLAUSE_THREAD_LIMIT:
7582 case OMP_CLAUSE_DIST_SCHEDULE:
7583 case OMP_CLAUSE_DEVICE:
7584 case OMP_CLAUSE_PRIORITY:
7585 case OMP_CLAUSE_GRAINSIZE:
7586 case OMP_CLAUSE_NUM_TASKS:
7587 case OMP_CLAUSE_HINT:
7588 case OMP_CLAUSE__CILK_FOR_COUNT_:
7589 case OMP_CLAUSE_ASYNC:
7590 case OMP_CLAUSE_WAIT:
7591 case OMP_CLAUSE_NUM_GANGS:
7592 case OMP_CLAUSE_NUM_WORKERS:
7593 case OMP_CLAUSE_VECTOR_LENGTH:
7594 case OMP_CLAUSE_WORKER:
7595 case OMP_CLAUSE_VECTOR:
7596 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7597 is_gimple_val, fb_rvalue) == GS_ERROR)
7598 remove = true;
7599 break;
7601 case OMP_CLAUSE_GANG:
7602 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7603 is_gimple_val, fb_rvalue) == GS_ERROR)
7604 remove = true;
7605 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
7606 is_gimple_val, fb_rvalue) == GS_ERROR)
7607 remove = true;
7608 break;
7610 case OMP_CLAUSE_TILE:
7611 for (tree list = OMP_CLAUSE_TILE_LIST (c); !remove && list;
7612 list = TREE_CHAIN (list))
7614 if (gimplify_expr (&TREE_VALUE (list), pre_p, NULL,
7615 is_gimple_val, fb_rvalue) == GS_ERROR)
7616 remove = true;
7618 break;
7620 case OMP_CLAUSE_NOWAIT:
7621 case OMP_CLAUSE_ORDERED:
7622 case OMP_CLAUSE_UNTIED:
7623 case OMP_CLAUSE_COLLAPSE:
7624 case OMP_CLAUSE_AUTO:
7625 case OMP_CLAUSE_SEQ:
7626 case OMP_CLAUSE_INDEPENDENT:
7627 case OMP_CLAUSE_MERGEABLE:
7628 case OMP_CLAUSE_PROC_BIND:
7629 case OMP_CLAUSE_SAFELEN:
7630 case OMP_CLAUSE_SIMDLEN:
7631 case OMP_CLAUSE_NOGROUP:
7632 case OMP_CLAUSE_THREADS:
7633 case OMP_CLAUSE_SIMD:
7634 break;
7636 case OMP_CLAUSE_DEFAULTMAP:
7637 ctx->target_map_scalars_firstprivate = false;
7638 break;
7640 case OMP_CLAUSE_ALIGNED:
7641 decl = OMP_CLAUSE_DECL (c);
7642 if (error_operand_p (decl))
7644 remove = true;
7645 break;
7647 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
7648 is_gimple_val, fb_rvalue) == GS_ERROR)
7650 remove = true;
7651 break;
7653 if (!is_global_var (decl)
7654 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
7655 omp_add_variable (ctx, decl, GOVD_ALIGNED);
7656 break;
7658 case OMP_CLAUSE_DEFAULT:
7659 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
7660 break;
7662 default:
7663 gcc_unreachable ();
7666 if (code == OACC_DATA
7667 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7668 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
7669 remove = true;
7670 if (remove)
7671 *list_p = OMP_CLAUSE_CHAIN (c);
7672 else
7673 list_p = &OMP_CLAUSE_CHAIN (c);
7676 gimplify_omp_ctxp = ctx;
7677 if (struct_map_to_clause)
7678 delete struct_map_to_clause;
7681 /* Return true if DECL is a candidate for shared to firstprivate
7682 optimization. We only consider non-addressable scalars, not
7683 too big, and not references. */
7685 static bool
7686 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
7688 if (TREE_ADDRESSABLE (decl))
7689 return false;
7690 tree type = TREE_TYPE (decl);
7691 if (!is_gimple_reg_type (type)
7692 || TREE_CODE (type) == REFERENCE_TYPE
7693 || TREE_ADDRESSABLE (type))
7694 return false;
7695 /* Don't optimize too large decls, as each thread/task will have
7696 its own. */
7697 HOST_WIDE_INT len = int_size_in_bytes (type);
7698 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
7699 return false;
7700 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7701 return false;
7702 return true;
7705 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
7706 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
7707 GOVD_WRITTEN in outer contexts. */
7709 static void
7710 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
7712 for (; ctx; ctx = ctx->outer_context)
7714 splay_tree_node n = splay_tree_lookup (ctx->variables,
7715 (splay_tree_key) decl);
7716 if (n == NULL)
7717 continue;
7718 else if (n->value & GOVD_SHARED)
7720 n->value |= GOVD_WRITTEN;
7721 return;
7723 else if (n->value & GOVD_DATA_SHARE_CLASS)
7724 return;
7728 /* Helper callback for walk_gimple_seq to discover possible stores
7729 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
7730 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
7731 for those. */
7733 static tree
7734 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
7736 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
7738 *walk_subtrees = 0;
7739 if (!wi->is_lhs)
7740 return NULL_TREE;
7742 tree op = *tp;
7745 if (handled_component_p (op))
7746 op = TREE_OPERAND (op, 0);
7747 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
7748 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
7749 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
7750 else
7751 break;
7753 while (1);
7754 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
7755 return NULL_TREE;
7757 omp_mark_stores (gimplify_omp_ctxp, op);
7758 return NULL_TREE;
7761 /* Helper callback for walk_gimple_seq to discover possible stores
7762 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
7763 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
7764 for those. */
7766 static tree
7767 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
7768 bool *handled_ops_p,
7769 struct walk_stmt_info *wi)
7771 gimple *stmt = gsi_stmt (*gsi_p);
7772 switch (gimple_code (stmt))
7774 /* Don't recurse on OpenMP constructs for which
7775 gimplify_adjust_omp_clauses already handled the bodies,
7776 except handle gimple_omp_for_pre_body. */
7777 case GIMPLE_OMP_FOR:
7778 *handled_ops_p = true;
7779 if (gimple_omp_for_pre_body (stmt))
7780 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
7781 omp_find_stores_stmt, omp_find_stores_op, wi);
7782 break;
7783 case GIMPLE_OMP_PARALLEL:
7784 case GIMPLE_OMP_TASK:
7785 case GIMPLE_OMP_SECTIONS:
7786 case GIMPLE_OMP_SINGLE:
7787 case GIMPLE_OMP_TARGET:
7788 case GIMPLE_OMP_TEAMS:
7789 case GIMPLE_OMP_CRITICAL:
7790 *handled_ops_p = true;
7791 break;
7792 default:
7793 break;
7795 return NULL_TREE;
/* Closure threaded through splay_tree_foreach to
   gimplify_adjust_omp_clauses_1.  */
7798 struct gimplify_adjust_omp_clauses_data
7800 tree *list_p;   /* Clause list that implicit clauses are prepended to.  */
7801 gimple_seq *pre_p;   /* Sequence receiving gimplified side effects.  */
7804 /* For all variables that were not actually used within the context,
7805 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.
   Splay-tree callback: N maps a decl to its GOVD_* flags, DATA is a
   gimplify_adjust_omp_clauses_data carrying the clause list and the
   pre-statement sequence.  When needed, builds an implicit clause for
   the decl and prepends it to *list_p.  Always returns 0 so the
   foreach walk continues.  */
7807 static int
7808 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
7810 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
7811 gimple_seq *pre_p
7812 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
7813 tree decl = (tree) n->key;
7814 unsigned flags = n->value;
7815 enum omp_clause_code code;
7816 tree clause;
7817 bool private_debug;
   /* Explicit clauses already exist on the list; locals need nothing.  */
7819 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
7820 return 0;
7821 if ((flags & GOVD_SEEN) == 0)
7822 return 0;
   /* Decide whether this becomes a debug-only PRIVATE clause
      (presumably for better debug info; delegated to the front end
      hook below).  */
7823 if (flags & GOVD_DEBUG_PRIVATE)
7825 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
7826 private_debug = true;
7828 else if (flags & GOVD_MAP)
7829 private_debug = false;
7830 else
7831 private_debug
7832 = lang_hooks.decls.omp_private_debug_clause (decl,
7833 !!(flags & GOVD_SHARED));
   /* Translate the GOVD_* data-sharing class into an OMP clause code.  */
7834 if (private_debug)
7835 code = OMP_CLAUSE_PRIVATE;
7836 else if (flags & GOVD_MAP)
7837 code = OMP_CLAUSE_MAP;
7838 else if (flags & GOVD_SHARED)
7840 if (is_global_var (decl))
   /* A global only needs an implicit SHARED clause if some enclosing
      context privatizes or maps it; otherwise drop it.  */
7842 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
7843 while (ctx != NULL)
7845 splay_tree_node on
7846 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7847 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7848 | GOVD_PRIVATE | GOVD_REDUCTION
7849 | GOVD_LINEAR | GOVD_MAP)) != 0)
7850 break;
7851 ctx = ctx->outer_context;
7853 if (ctx == NULL)
7854 return 0;
7856 code = OMP_CLAUSE_SHARED;
7858 else if (flags & GOVD_PRIVATE)
7859 code = OMP_CLAUSE_PRIVATE;
7860 else if (flags & GOVD_FIRSTPRIVATE)
7861 code = OMP_CLAUSE_FIRSTPRIVATE;
7862 else if (flags & GOVD_LASTPRIVATE)
7863 code = OMP_CLAUSE_LASTPRIVATE;
7864 else if (flags & GOVD_ALIGNED)
7865 return 0;
7866 else
7867 gcc_unreachable ();
   /* Lastprivate copy-out and written shared vars count as stores in
      the enclosing contexts.  */
7869 if (((flags & GOVD_LASTPRIVATE)
7870 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
7871 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7872 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
7874 clause = build_omp_clause (input_location, code);
7875 OMP_CLAUSE_DECL (clause) = decl;
7876 OMP_CLAUSE_CHAIN (clause) = *list_p;
7877 if (private_debug)
7878 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
7879 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
7880 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
7881 else if (code == OMP_CLAUSE_SHARED
7882 && (flags & GOVD_WRITTEN) == 0
7883 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7884 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
7885 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
7886 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
   /* GOVD_MAP_0LEN_ARRAY: map a zero-length array section rooted at
      DECL plus a GOMP_MAP_FIRSTPRIVATE_POINTER clause for the base
      pointer itself.  */
7887 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
7889 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
7890 OMP_CLAUSE_DECL (nc) = decl;
7891 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7892 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
7893 OMP_CLAUSE_DECL (clause)
7894 = build_simple_mem_ref_loc (input_location, decl);
7895 OMP_CLAUSE_DECL (clause)
7896 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
7897 build_int_cst (build_pointer_type (char_type_node), 0));
7898 OMP_CLAUSE_SIZE (clause) = size_zero_node;
7899 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7900 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
7901 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
7902 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
7903 OMP_CLAUSE_CHAIN (nc) = *list_p;
7904 OMP_CLAUSE_CHAIN (clause) = nc;
   /* Gimplify the address in the enclosing context, not this one.  */
7905 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7906 gimplify_omp_ctxp = ctx->outer_context;
7907 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
7908 pre_p, NULL, is_gimple_val, fb_rvalue);
7909 gimplify_omp_ctxp = ctx;
7911 else if (code == OMP_CLAUSE_MAP)
7913 int kind = (flags & GOVD_MAP_TO_ONLY
7914 ? GOMP_MAP_TO
7915 : GOMP_MAP_TOFROM);
7916 if (flags & GOVD_MAP_FORCE)
7917 kind |= GOMP_MAP_FLAG_FORCE;
7918 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
   /* Variable-sized decl: map through its DECL_VALUE_EXPR pointer and
      add a companion pointer clause.  */
7919 if (DECL_SIZE (decl)
7920 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7922 tree decl2 = DECL_VALUE_EXPR (decl);
7923 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
7924 decl2 = TREE_OPERAND (decl2, 0);
7925 gcc_assert (DECL_P (decl2));
7926 tree mem = build_simple_mem_ref (decl2);
7927 OMP_CLAUSE_DECL (clause) = mem;
7928 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7929 if (gimplify_omp_ctxp->outer_context)
7931 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
7932 omp_notice_variable (ctx, decl2, true);
7933 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
7935 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
7936 OMP_CLAUSE_MAP);
7937 OMP_CLAUSE_DECL (nc) = decl;
7938 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7939 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
7940 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
7941 else
7942 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
7943 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
7944 OMP_CLAUSE_CHAIN (clause) = nc;
   /* Reference privatized by reference: map the referenced object and
      firstprivatize the reference itself.  */
7946 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7947 && lang_hooks.decls.omp_privatize_by_reference (decl))
7949 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
7950 OMP_CLAUSE_SIZE (clause)
7951 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
7952 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7953 gimplify_omp_ctxp = ctx->outer_context;
7954 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
7955 pre_p, NULL, is_gimple_val, fb_rvalue);
7956 gimplify_omp_ctxp = ctx;
7957 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
7958 OMP_CLAUSE_MAP);
7959 OMP_CLAUSE_DECL (nc) = decl;
7960 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7961 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
7962 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
7963 OMP_CLAUSE_CHAIN (clause) = nc;
7965 else
7966 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
   /* Firstprivate that is also lastprivate: emit the companion
      LASTPRIVATE clause as well.  */
7968 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
7970 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
7971 OMP_CLAUSE_DECL (nc) = decl;
7972 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
7973 OMP_CLAUSE_CHAIN (nc) = *list_p;
7974 OMP_CLAUSE_CHAIN (clause) = nc;
7975 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7976 gimplify_omp_ctxp = ctx->outer_context;
7977 lang_hooks.decls.omp_finish_clause (nc, pre_p);
7978 gimplify_omp_ctxp = ctx;
   /* Prepend the new clause and let the front end finalize it, again
      in the enclosing context.  */
7980 *list_p = clause;
7981 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7982 gimplify_omp_ctxp = ctx->outer_context;
7983 lang_hooks.decls.omp_finish_clause (clause, pre_p);
7984 gimplify_omp_ctxp = ctx;
7985 return 0;
/* Post-process the clause list *LIST_P of construct CODE after its
   BODY has been gimplified: remove clauses for decls never seen in the
   region, rewrite variable-sized and OpenACC-reduction mappings, then
   append implicit data-sharing clauses (gimplify_adjust_omp_clauses_1)
   and pop/delete the current gimplify_omp_ctxp context.  PRE_P
   receives any gimplified side effects.  */
7988 static void
7989 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
7990 enum tree_code code)
7992 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7993 tree c, decl;
   /* If this construct has a parallel/task/teams ancestor, scan BODY
      for stores to shared decls so GOVD_WRITTEN is accurate.  */
7995 if (body)
7997 struct gimplify_omp_ctx *octx;
7998 for (octx = ctx; octx; octx = octx->outer_context)
7999 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
8000 break;
8001 if (octx)
8003 struct walk_stmt_info wi;
8004 memset (&wi, 0, sizeof (wi));
8005 walk_gimple_seq (body, omp_find_stores_stmt,
8006 omp_find_stores_op, &wi);
   /* Walk the clause list, deciding per clause whether to keep,
      rewrite or remove it.  */
8009 while ((c = *list_p) != NULL)
8011 splay_tree_node n;
8012 bool remove = false;
8014 switch (OMP_CLAUSE_CODE (c))
8016 case OMP_CLAUSE_PRIVATE:
8017 case OMP_CLAUSE_SHARED:
8018 case OMP_CLAUSE_FIRSTPRIVATE:
8019 case OMP_CLAUSE_LINEAR:
8020 decl = OMP_CLAUSE_DECL (c);
8021 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8022 remove = !(n->value & GOVD_SEEN);
8023 if (! remove)
8025 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
8026 if ((n->value & GOVD_DEBUG_PRIVATE)
8027 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
8029 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
8030 || ((n->value & GOVD_DATA_SHARE_CLASS)
8031 == GOVD_PRIVATE));
8032 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
8033 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
8035 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8036 && (n->value & GOVD_WRITTEN) == 0
8037 && DECL_P (decl)
8038 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8039 OMP_CLAUSE_SHARED_READONLY (c) = 1;
   /* NOTE(review): "(n->value & GOVD_WRITTEN) != 1" below compares a
      bit-mask result against 1, so it is always true.  Reaching this
      else-branch with a SHARED clause on an optimizable decl already
      implies GOVD_WRITTEN is set (the branch above failed), so the
      effect matches the presumably intended "!= 0" — but confirm the
      intent upstream.  */
8040 else if (DECL_P (decl)
8041 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8042 && (n->value & GOVD_WRITTEN) != 1)
8043 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8044 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8045 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8046 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8048 break;
8050 case OMP_CLAUSE_LASTPRIVATE:
8051 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8052 accurately reflect the presence of a FIRSTPRIVATE clause. */
8053 decl = OMP_CLAUSE_DECL (c);
8054 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8055 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
8056 = (n->value & GOVD_FIRSTPRIVATE) != 0;
8057 if (omp_no_lastprivate (ctx))
8059 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8060 remove = true;
8061 else
   /* NOTE(review): direct assignment where the nearby code uses
      OMP_CLAUSE_SET_CODE — works, but inconsistent.  */
8062 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_PRIVATE;
8064 else if (code == OMP_DISTRIBUTE
8065 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8067 remove = true;
8068 error_at (OMP_CLAUSE_LOCATION (c),
8069 "same variable used in %<firstprivate%> and "
8070 "%<lastprivate%> clauses on %<distribute%> "
8071 "construct");
8073 if (!remove
8074 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8075 && DECL_P (decl)
8076 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8077 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8078 break;
8080 case OMP_CLAUSE_ALIGNED:
8081 decl = OMP_CLAUSE_DECL (c);
8082 if (!is_global_var (decl))
8084 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8085 remove = n == NULL || !(n->value & GOVD_SEEN);
8086 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8088 struct gimplify_omp_ctx *octx;
8089 if (n != NULL
8090 && (n->value & (GOVD_DATA_SHARE_CLASS
8091 & ~GOVD_FIRSTPRIVATE)))
8092 remove = true;
8093 else
8094 for (octx = ctx->outer_context; octx;
8095 octx = octx->outer_context)
8097 n = splay_tree_lookup (octx->variables,
8098 (splay_tree_key) decl);
8099 if (n == NULL)
8100 continue;
8101 if (n->value & GOVD_LOCAL)
8102 break;
8103 /* We have to avoid assigning a shared variable
8104 to itself when trying to add
8105 __builtin_assume_aligned. */
8106 if (n->value & GOVD_SHARED)
8108 remove = true;
8109 break;
8114 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
8116 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8117 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8118 remove = true;
8120 break;
8122 case OMP_CLAUSE_MAP:
8123 if (code == OMP_TARGET_EXIT_DATA
8124 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8126 remove = true;
8127 break;
8129 decl = OMP_CLAUSE_DECL (c);
8130 /* Data clauses associated with acc parallel reductions must be
8131 compatible with present_or_copy. Warn and adjust the clause
8132 if that is not the case. */
8133 if (ctx->region_type == ORT_ACC_PARALLEL)
8135 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
8136 n = NULL;
8138 if (DECL_P (t))
8139 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8141 if (n && (n->value & GOVD_REDUCTION))
8143 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
8145 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
8146 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
8147 && kind != GOMP_MAP_FORCE_PRESENT
8148 && kind != GOMP_MAP_POINTER)
8150 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8151 "incompatible data clause with reduction "
8152 "on %qE; promoting to present_or_copy",
8153 DECL_NAME (t));
8154 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
   /* Non-decl map operand: only member accesses through
      FIRSTPRIVATE_POINTER inside a target region are checked for
      GOVD_SEEN on the base decl.  */
8158 if (!DECL_P (decl))
8160 if ((ctx->region_type & ORT_TARGET) != 0
8161 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8163 if (TREE_CODE (decl) == INDIRECT_REF
8164 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8165 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8166 == REFERENCE_TYPE))
8167 decl = TREE_OPERAND (decl, 0);
8168 if (TREE_CODE (decl) == COMPONENT_REF)
8170 while (TREE_CODE (decl) == COMPONENT_REF)
8171 decl = TREE_OPERAND (decl, 0);
8172 if (DECL_P (decl))
8174 n = splay_tree_lookup (ctx->variables,
8175 (splay_tree_key) decl);
8176 if (!(n->value & GOVD_SEEN))
8177 remove = true;
8181 break;
8183 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8184 if ((ctx->region_type & ORT_TARGET) != 0
8185 && !(n->value & GOVD_SEEN)
8186 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
8187 && !lookup_attribute ("omp declare target link",
8188 DECL_ATTRIBUTES (decl)))
8190 remove = true;
8191 /* For struct element mapping, if struct is never referenced
8192 in target block and none of the mapping has always modifier,
8193 remove all the struct element mappings, which immediately
8194 follow the GOMP_MAP_STRUCT map clause. */
8195 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
8197 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
8198 while (cnt--)
8199 OMP_CLAUSE_CHAIN (c)
8200 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
8203 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
8204 && code == OMP_TARGET_EXIT_DATA)
8205 remove = true;
   /* Variable-sized decl: rewrite the map through its
      DECL_VALUE_EXPR pointer and add a companion pointer clause.  */
8206 else if (DECL_SIZE (decl)
8207 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
8208 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
8209 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
8210 && (OMP_CLAUSE_MAP_KIND (c)
8211 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8213 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
8214 for these, TREE_CODE (DECL_SIZE (decl)) will always be
8215 INTEGER_CST. */
8216 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
8218 tree decl2 = DECL_VALUE_EXPR (decl);
8219 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8220 decl2 = TREE_OPERAND (decl2, 0);
8221 gcc_assert (DECL_P (decl2));
8222 tree mem = build_simple_mem_ref (decl2);
8223 OMP_CLAUSE_DECL (c) = mem;
8224 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8225 if (ctx->outer_context)
8227 omp_notice_variable (ctx->outer_context, decl2, true);
8228 omp_notice_variable (ctx->outer_context,
8229 OMP_CLAUSE_SIZE (c), true);
8231 if (((ctx->region_type & ORT_TARGET) != 0
8232 || !ctx->target_firstprivatize_array_bases)
8233 && ((n->value & GOVD_SEEN) == 0
8234 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
8236 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8237 OMP_CLAUSE_MAP);
8238 OMP_CLAUSE_DECL (nc) = decl;
8239 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8240 if (ctx->target_firstprivatize_array_bases)
8241 OMP_CLAUSE_SET_MAP_KIND (nc,
8242 GOMP_MAP_FIRSTPRIVATE_POINTER);
8243 else
8244 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8245 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
8246 OMP_CLAUSE_CHAIN (c) = nc;
8247 c = nc;
8250 else
8252 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8253 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
8254 gcc_assert ((n->value & GOVD_SEEN) == 0
8255 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
8256 == 0));
8258 break;
8260 case OMP_CLAUSE_TO:
8261 case OMP_CLAUSE_FROM:
8262 case OMP_CLAUSE__CACHE_:
8263 decl = OMP_CLAUSE_DECL (c);
8264 if (!DECL_P (decl))
8265 break;
8266 if (DECL_SIZE (decl)
8267 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8269 tree decl2 = DECL_VALUE_EXPR (decl);
8270 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8271 decl2 = TREE_OPERAND (decl2, 0);
8272 gcc_assert (DECL_P (decl2));
8273 tree mem = build_simple_mem_ref (decl2);
8274 OMP_CLAUSE_DECL (c) = mem;
8275 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8276 if (ctx->outer_context)
8278 omp_notice_variable (ctx->outer_context, decl2, true);
8279 omp_notice_variable (ctx->outer_context,
8280 OMP_CLAUSE_SIZE (c), true);
8283 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8284 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
8285 break;
8287 case OMP_CLAUSE_REDUCTION:
8288 decl = OMP_CLAUSE_DECL (c);
8289 /* OpenACC reductions need a present_or_copy data clause.
8290 Add one if necessary.  Error if the reduction is private. */
8291 if (ctx->region_type == ORT_ACC_PARALLEL)
8293 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8294 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
8295 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
8296 "reduction on %qE", DECL_NAME (decl));
8297 else if ((n->value & GOVD_MAP) == 0)
8299 tree next = OMP_CLAUSE_CHAIN (c);
8300 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
8301 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
8302 OMP_CLAUSE_DECL (nc) = decl;
8303 OMP_CLAUSE_CHAIN (c) = nc;
8304 lang_hooks.decls.omp_finish_clause (nc, pre_p);
   /* omp_finish_clause may have chained extra clauses after NC;
      mark them all as being in a reduction.  */
8305 while (1)
8307 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
8308 if (OMP_CLAUSE_CHAIN (nc) == NULL)
8309 break;
8310 nc = OMP_CLAUSE_CHAIN (nc);
8312 OMP_CLAUSE_CHAIN (nc) = next;
8313 n->value |= GOVD_MAP;
8316 if (DECL_P (decl)
8317 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8318 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8319 break;
8320 case OMP_CLAUSE_COPYIN:
8321 case OMP_CLAUSE_COPYPRIVATE:
8322 case OMP_CLAUSE_IF:
8323 case OMP_CLAUSE_NUM_THREADS:
8324 case OMP_CLAUSE_NUM_TEAMS:
8325 case OMP_CLAUSE_THREAD_LIMIT:
8326 case OMP_CLAUSE_DIST_SCHEDULE:
8327 case OMP_CLAUSE_DEVICE:
8328 case OMP_CLAUSE_SCHEDULE:
8329 case OMP_CLAUSE_NOWAIT:
8330 case OMP_CLAUSE_ORDERED:
8331 case OMP_CLAUSE_DEFAULT:
8332 case OMP_CLAUSE_UNTIED:
8333 case OMP_CLAUSE_COLLAPSE:
8334 case OMP_CLAUSE_FINAL:
8335 case OMP_CLAUSE_MERGEABLE:
8336 case OMP_CLAUSE_PROC_BIND:
8337 case OMP_CLAUSE_SAFELEN:
8338 case OMP_CLAUSE_SIMDLEN:
8339 case OMP_CLAUSE_DEPEND:
8340 case OMP_CLAUSE_PRIORITY:
8341 case OMP_CLAUSE_GRAINSIZE:
8342 case OMP_CLAUSE_NUM_TASKS:
8343 case OMP_CLAUSE_NOGROUP:
8344 case OMP_CLAUSE_THREADS:
8345 case OMP_CLAUSE_SIMD:
8346 case OMP_CLAUSE_HINT:
8347 case OMP_CLAUSE_DEFAULTMAP:
8348 case OMP_CLAUSE_USE_DEVICE_PTR:
8349 case OMP_CLAUSE_IS_DEVICE_PTR:
8350 case OMP_CLAUSE__CILK_FOR_COUNT_:
8351 case OMP_CLAUSE_ASYNC:
8352 case OMP_CLAUSE_WAIT:
8353 case OMP_CLAUSE_INDEPENDENT:
8354 case OMP_CLAUSE_NUM_GANGS:
8355 case OMP_CLAUSE_NUM_WORKERS:
8356 case OMP_CLAUSE_VECTOR_LENGTH:
8357 case OMP_CLAUSE_GANG:
8358 case OMP_CLAUSE_WORKER:
8359 case OMP_CLAUSE_VECTOR:
8360 case OMP_CLAUSE_AUTO:
8361 case OMP_CLAUSE_SEQ:
8362 break;
8364 case OMP_CLAUSE_TILE:
8365 /* We're not yet making use of the information provided by OpenACC
8366 tile clauses. Discard these here, to simplify later middle end
8367 processing. */
8368 remove = true;
8369 break;
8371 default:
8372 gcc_unreachable ();
8375 if (remove)
8376 *list_p = OMP_CLAUSE_CHAIN (c);
8377 else
8378 list_p = &OMP_CLAUSE_CHAIN (c);
8381 /* Add in any implicit data sharing. */
8382 struct gimplify_adjust_omp_clauses_data data;
8383 data.list_p = list_p;
8384 data.pre_p = pre_p;
8385 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
8387 gimplify_omp_ctxp = ctx->outer_context;
8388 delete_omp_context (ctx);
8391 /* Gimplify OACC_CACHE. */
8393 static void
8394 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
8396 tree expr = *expr_p;
8398 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
8399 OACC_CACHE);
8400 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
8401 OACC_CACHE);
8403 /* TODO: Do something sensible with this information. */
8405 *expr_p = NULL_TREE;
8408 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
8409 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
8410 kind. The entry kind will replace the one in CLAUSE, while the exit
8411 kind will be used in a new omp_clause and returned to the caller. */
8413 static tree
8414 gimplify_oacc_declare_1 (tree clause)
8416 HOST_WIDE_INT kind, new_op;
8417 bool ret = false;
8418 tree c = NULL;
8420 kind = OMP_CLAUSE_MAP_KIND (clause);
8422 switch (kind)
8424 case GOMP_MAP_ALLOC:
8425 case GOMP_MAP_FORCE_ALLOC:
8426 case GOMP_MAP_FORCE_TO:
8427 new_op = GOMP_MAP_DELETE;
8428 ret = true;
8429 break;
8431 case GOMP_MAP_FORCE_FROM:
8432 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8433 new_op = GOMP_MAP_FORCE_FROM;
8434 ret = true;
8435 break;
8437 case GOMP_MAP_FORCE_TOFROM:
8438 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
8439 new_op = GOMP_MAP_FORCE_FROM;
8440 ret = true;
8441 break;
8443 case GOMP_MAP_FROM:
8444 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8445 new_op = GOMP_MAP_FROM;
8446 ret = true;
8447 break;
8449 case GOMP_MAP_TOFROM:
8450 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
8451 new_op = GOMP_MAP_FROM;
8452 ret = true;
8453 break;
8455 case GOMP_MAP_DEVICE_RESIDENT:
8456 case GOMP_MAP_FORCE_DEVICEPTR:
8457 case GOMP_MAP_FORCE_PRESENT:
8458 case GOMP_MAP_LINK:
8459 case GOMP_MAP_POINTER:
8460 case GOMP_MAP_TO:
8461 break;
8463 default:
8464 gcc_unreachable ();
8465 break;
8468 if (ret)
8470 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
8471 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
8472 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
8475 return c;
8478 /* Gimplify OACC_DECLARE. */
8480 static void
8481 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
8483 tree expr = *expr_p;
8484 gomp_target *stmt;
8485 tree clauses, t;
8487 clauses = OACC_DECLARE_CLAUSES (expr);
8489 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
8491 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
8493 tree decl = OMP_CLAUSE_DECL (t);
8495 if (TREE_CODE (decl) == MEM_REF)
8496 continue;
8498 if (TREE_CODE (decl) == VAR_DECL
8499 && !is_global_var (decl)
8500 && DECL_CONTEXT (decl) == current_function_decl)
8502 tree c = gimplify_oacc_declare_1 (t);
8503 if (c)
8505 if (oacc_declare_returns == NULL)
8506 oacc_declare_returns = new hash_map<tree, tree>;
8508 oacc_declare_returns->put (decl, c);
8512 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
8515 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
8516 clauses);
8518 gimplify_seq_add_stmt (pre_p, stmt);
8520 *expr_p = NULL_TREE;
8523 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
8524 gimplification of the body, as well as scanning the body for used
8525 variables. We need to do this scan now, because variable-sized
8526 decls will be decomposed during gimplification. */
8528 static void
8529 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
8531 tree expr = *expr_p;
8532 gimple *g;
8533 gimple_seq body = NULL;
8535 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
8536 OMP_PARALLEL_COMBINED (expr)
8537 ? ORT_COMBINED_PARALLEL
8538 : ORT_PARALLEL, OMP_PARALLEL);
8540 push_gimplify_context ();
8542 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
8543 if (gimple_code (g) == GIMPLE_BIND)
8544 pop_gimplify_context (g);
8545 else
8546 pop_gimplify_context (NULL);
8548 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
8549 OMP_PARALLEL);
8551 g = gimple_build_omp_parallel (body,
8552 OMP_PARALLEL_CLAUSES (expr),
8553 NULL_TREE, NULL_TREE);
8554 if (OMP_PARALLEL_COMBINED (expr))
8555 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
8556 gimplify_seq_add_stmt (pre_p, g);
8557 *expr_p = NULL_TREE;
8560 /* Gimplify the contents of an OMP_TASK statement. This involves
8561 gimplification of the body, as well as scanning the body for used
8562 variables. We need to do this scan now, because variable-sized
8563 decls will be decomposed during gimplification. */
8565 static void
8566 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
8568 tree expr = *expr_p;
8569 gimple *g;
8570 gimple_seq body = NULL;
8572 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
8573 find_omp_clause (OMP_TASK_CLAUSES (expr),
8574 OMP_CLAUSE_UNTIED)
8575 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
8577 push_gimplify_context ();
8579 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
8580 if (gimple_code (g) == GIMPLE_BIND)
8581 pop_gimplify_context (g);
8582 else
8583 pop_gimplify_context (NULL);
8585 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
8586 OMP_TASK);
8588 g = gimple_build_omp_task (body,
8589 OMP_TASK_CLAUSES (expr),
8590 NULL_TREE, NULL_TREE,
8591 NULL_TREE, NULL_TREE, NULL_TREE);
8592 gimplify_seq_add_stmt (pre_p, g);
8593 *expr_p = NULL_TREE;
8596 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
8597 with non-NULL OMP_FOR_INIT. */
8599 static tree
8600 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
8602 *walk_subtrees = 0;
8603 switch (TREE_CODE (*tp))
8605 case OMP_FOR:
8606 *walk_subtrees = 1;
8607 /* FALLTHRU */
8608 case OMP_SIMD:
8609 if (OMP_FOR_INIT (*tp) != NULL_TREE)
8610 return *tp;
8611 break;
8612 case BIND_EXPR:
8613 case STATEMENT_LIST:
8614 case OMP_PARALLEL:
8615 *walk_subtrees = 1;
8616 break;
8617 default:
8618 break;
8620 return NULL_TREE;
/* Gimplify the gross structure of an OMP_FOR statement (also handles
   OMP_SIMD, OMP_DISTRIBUTE, OMP_TASKLOOP, OACC_LOOP, CILK_FOR and
   CILK_SIMD).  Gimplifies the loop header (init/cond/incr vectors),
   privatizes the iteration variables, gimplifies the body into a
   GIMPLE_OMP_FOR and appends it to *PRE_P.  Returns GS_ALL_DONE on
   success, GS_ERROR otherwise.  */

static enum gimplify_status
gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
  enum gimplify_status ret = GS_ALL_DONE;
  enum gimplify_status tret;
  gomp_for *gfor;
  gimple_seq for_body, for_pre_body;
  int i;
  bitmap has_decl_expr = NULL;
  enum omp_region_type ort = ORT_WORKSHARE;

  orig_for_stmt = for_stmt = *expr_p;

  /* Pick the OMP region type from the construct kind.  */
  switch (TREE_CODE (for_stmt))
    {
    case OMP_FOR:
    case CILK_FOR:
    case OMP_DISTRIBUTE:
      break;
    case OACC_LOOP:
      ort = ORT_ACC;
      break;
    case OMP_TASKLOOP:
      if (find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
	ort = ORT_UNTIED_TASK;
      else
	ort = ORT_TASK;
      break;
    case OMP_SIMD:
    case CILK_SIMD:
      ort = ORT_SIMD;
      break;
    default:
      gcc_unreachable ();
    }

  /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
     clause for the IV.  */
  if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	    && OMP_CLAUSE_DECL (c) == decl)
	  {
	    OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
	    break;
	  }
    }

  /* A NULL OMP_FOR_INIT marks a combined construct whose real loop is
     nested inside the body; dig it out.  */
  if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
    {
      gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
      inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
				  find_combined_omp_for, NULL, NULL);
      if (inner_for_stmt == NULL_TREE)
	{
	  /* Only possible after an earlier diagnosed error.  */
	  gcc_assert (seen_error ());
	  *expr_p = NULL_TREE;
	  return GS_ERROR;
	}
    }

  /* Taskloop clauses are scanned later, after start/end/step have been
     pre-gimplified outside of the taskloop context.  */
  if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
    gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
			       TREE_CODE (for_stmt));

  if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
    gimplify_omp_ctxp->distribute = true;

  /* Handle OMP_FOR_INIT.  */
  for_pre_body = NULL;
  if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
    {
      /* Record which iteration variables were declared in the pre-body;
	 those do not need a copy-out (lastprivate) treatment.  */
      has_decl_expr = BITMAP_ALLOC (NULL);
      if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
	  && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
	     == VAR_DECL)
	{
	  t = OMP_FOR_PRE_BODY (for_stmt);
	  bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
	}
      else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
	{
	  tree_stmt_iterator si;
	  for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
	       tsi_next (&si))
	    {
	      t = tsi_stmt (si);
	      if (TREE_CODE (t) == DECL_EXPR
		  && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
		bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
	    }
	}
    }
  if (OMP_FOR_PRE_BODY (for_stmt))
    {
      if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
	gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
      else
	{
	  /* For a taskloop outside of any OMP context, gimplify the
	     pre-body inside a dummy ORT_NONE context.  */
	  struct gimplify_omp_ctx ctx;
	  memset (&ctx, 0, sizeof (ctx));
	  ctx.region_type = ORT_NONE;
	  gimplify_omp_ctxp = &ctx;
	  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
	  gimplify_omp_ctxp = NULL;
	}
    }
  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;

  /* From here on, operate on the inner loop of a combined construct.  */
  if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
    for_stmt = inner_for_stmt;

  /* For taskloop, need to gimplify the start, end and step before the
     taskloop, outside of the taskloop omp context.  */
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	{
	  t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	  if (!is_gimple_constant (TREE_OPERAND (t, 1)))
	    {
	      /* Evaluate the start value into a temporary and make it
		 firstprivate on the taskloop.  */
	      TREE_OPERAND (t, 1)
		= get_initialized_tmp_var (TREE_OPERAND (t, 1),
					   pre_p, NULL, false);
	      tree c = build_omp_clause (input_location,
					 OMP_CLAUSE_FIRSTPRIVATE);
	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
	      OMP_FOR_CLAUSES (orig_for_stmt) = c;
	    }

	  /* Handle OMP_FOR_COND.  */
	  t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
	  if (!is_gimple_constant (TREE_OPERAND (t, 1)))
	    {
	      TREE_OPERAND (t, 1)
		= get_initialized_tmp_var (TREE_OPERAND (t, 1),
					   gimple_seq_empty_p (for_pre_body)
					   ? pre_p : &for_pre_body, NULL,
					   false);
	      tree c = build_omp_clause (input_location,
					 OMP_CLAUSE_FIRSTPRIVATE);
	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
	      OMP_FOR_CLAUSES (orig_for_stmt) = c;
	    }

	  /* Handle OMP_FOR_INCR.  */
	  t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
	  if (TREE_CODE (t) == MODIFY_EXPR)
	    {
	      decl = TREE_OPERAND (t, 0);
	      t = TREE_OPERAND (t, 1);
	      /* The step is whichever PLUS_EXPR operand is not the IV.  */
	      tree *tp = &TREE_OPERAND (t, 1);
	      if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
		tp = &TREE_OPERAND (t, 0);

	      if (!is_gimple_constant (*tp))
		{
		  gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
				    ? pre_p : &for_pre_body;
		  *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
		  tree c = build_omp_clause (input_location,
					     OMP_CLAUSE_FIRSTPRIVATE);
		  OMP_CLAUSE_DECL (c) = *tp;
		  OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
		  OMP_FOR_CLAUSES (orig_for_stmt) = c;
		}
	    }
	}

      gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
				 OMP_TASKLOOP);
    }

  if (orig_for_stmt != for_stmt)
    gimplify_omp_ctxp->combined_loop = true;

  for_body = NULL;
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));

  /* ordered(n) with an expression marks a doacross loop; remember the
     iteration variables for dependence checking later.  */
  tree c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
  bool is_doacross = false;
  if (c && OMP_CLAUSE_ORDERED_EXPR (c))
    {
      is_doacross = true;
      gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
						(OMP_FOR_INIT (for_stmt))
					       * 2);
    }
  int collapse = 1;
  c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
  if (c)
    collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));

  /* Process each collapsed loop level in turn.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
		  || POINTER_TYPE_P (TREE_TYPE (decl)));
      if (is_doacross)
	{
	  /* Push both the original user decl (when available) and the
	     actual iteration decl.  */
	  if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
	    gimplify_omp_ctxp->loop_iter_var.quick_push
	      (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
	  else
	    gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
	  gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
	}

      /* Make sure the iteration variable is private.  */
      tree c = NULL_TREE;
      tree c2 = NULL_TREE;
      if (orig_for_stmt != for_stmt)
	/* Do this only on innermost construct for combined ones.  */;
      else if (ort == ORT_SIMD)
	{
	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
						 (splay_tree_key) decl);
	  /* Called for its diagnostics/side effects; result unused here.  */
	  omp_is_private (gimplify_omp_ctxp, decl,
			  1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
			       != 1));
	  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
	    omp_notice_variable (gimplify_omp_ctxp, decl, true);
	  else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
	    {
	      /* Non-collapsed simd: make the IV implicitly linear.  */
	      c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
	      OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
	      unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
	      if ((has_decl_expr
		   && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
		  || omp_no_lastprivate (gimplify_omp_ctxp))
		{
		  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
		  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
		}
	      struct gimplify_omp_ctx *outer
		= gimplify_omp_ctxp->outer_context;
	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		{
		  /* If the IV is local to an enclosing combined worksharing
		     loop, no copy-out is needed either.  */
		  if (outer->region_type == ORT_WORKSHARE
		      && outer->combined_loop)
		    {
		      n = splay_tree_lookup (outer->variables,
					     (splay_tree_key)decl);
		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
			{
			  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
			  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
			}
		      else
			{
			  struct gimplify_omp_ctx *octx = outer->outer_context;
			  if (octx
			      && octx->region_type == ORT_COMBINED_PARALLEL
			      && octx->outer_context
			      && (octx->outer_context->region_type
				  == ORT_WORKSHARE)
			      && octx->outer_context->combined_loop)
			    {
			      octx = octx->outer_context;
			      n = splay_tree_lookup (octx->variables,
						     (splay_tree_key)decl);
			      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
				{
				  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
				  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
				}
			    }
			}
		    }
		}

	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c;
	      omp_add_variable (gimplify_omp_ctxp, decl, flags);
	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		{
		  /* Propagate lastprivate-ness of the IV to the relevant
		     enclosing context(s).  */
		  if (outer->region_type == ORT_WORKSHARE
		      && outer->combined_loop)
		    {
		      if (outer->outer_context
			  && (outer->outer_context->region_type
			      == ORT_COMBINED_PARALLEL))
			outer = outer->outer_context;
		      else if (omp_check_private (outer, decl, false))
			outer = NULL;
		    }
		  else if (((outer->region_type & ORT_TASK) != 0)
			   && outer->combined_loop
			   && !omp_check_private (gimplify_omp_ctxp,
						  decl, false))
		    ;
		  else if (outer->region_type != ORT_COMBINED_PARALLEL)
		    {
		      omp_notice_variable (outer, decl, true);
		      outer = NULL;
		    }
		  if (outer)
		    {
		      n = splay_tree_lookup (outer->variables,
					     (splay_tree_key)decl);
		      if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
			{
			  omp_add_variable (outer, decl,
					    GOVD_LASTPRIVATE | GOVD_SEEN);
			  if (outer->region_type == ORT_COMBINED_PARALLEL
			      && outer->outer_context
			      && (outer->outer_context->region_type
				  == ORT_WORKSHARE)
			      && outer->outer_context->combined_loop)
			    {
			      outer = outer->outer_context;
			      n = splay_tree_lookup (outer->variables,
						     (splay_tree_key)decl);
			      if (omp_check_private (outer, decl, false))
				outer = NULL;
			      else if (n == NULL
				       || ((n->value & GOVD_DATA_SHARE_CLASS)
					   == 0))
				omp_add_variable (outer, decl,
						  GOVD_LASTPRIVATE
						  | GOVD_SEEN);
			      else
				outer = NULL;
			    }
			  if (outer && outer->outer_context
			      && (outer->outer_context->region_type
				  == ORT_COMBINED_TEAMS))
			    {
			      outer = outer->outer_context;
			      n = splay_tree_lookup (outer->variables,
						     (splay_tree_key)decl);
			      if (n == NULL
				  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
				omp_add_variable (outer, decl,
						  GOVD_SHARED | GOVD_SEEN);
			      else
				outer = NULL;
			    }
			  if (outer && outer->outer_context)
			    omp_notice_variable (outer->outer_context, decl,
						 true);
			}
		    }
		}
	    }
	  else
	    {
	      /* Collapsed simd: the IV is private, or lastprivate unless
		 declared in the pre-body.  Mirrors the linear case above.  */
	      bool lastprivate
		= (!has_decl_expr
		   || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
		  && !omp_no_lastprivate (gimplify_omp_ctxp);
	      struct gimplify_omp_ctx *outer
		= gimplify_omp_ctxp->outer_context;
	      if (outer && lastprivate)
		{
		  if (outer->region_type == ORT_WORKSHARE
		      && outer->combined_loop)
		    {
		      n = splay_tree_lookup (outer->variables,
					     (splay_tree_key)decl);
		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
			{
			  lastprivate = false;
			  outer = NULL;
			}
		      else if (outer->outer_context
			       && (outer->outer_context->region_type
				   == ORT_COMBINED_PARALLEL))
			outer = outer->outer_context;
		      else if (omp_check_private (outer, decl, false))
			outer = NULL;
		    }
		  else if (((outer->region_type & ORT_TASK) != 0)
			   && outer->combined_loop
			   && !omp_check_private (gimplify_omp_ctxp,
						  decl, false))
		    ;
		  else if (outer->region_type != ORT_COMBINED_PARALLEL)
		    {
		      omp_notice_variable (outer, decl, true);
		      outer = NULL;
		    }
		  if (outer)
		    {
		      n = splay_tree_lookup (outer->variables,
					     (splay_tree_key)decl);
		      if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
			{
			  omp_add_variable (outer, decl,
					    GOVD_LASTPRIVATE | GOVD_SEEN);
			  if (outer->region_type == ORT_COMBINED_PARALLEL
			      && outer->outer_context
			      && (outer->outer_context->region_type
				  == ORT_WORKSHARE)
			      && outer->outer_context->combined_loop)
			    {
			      outer = outer->outer_context;
			      n = splay_tree_lookup (outer->variables,
						     (splay_tree_key)decl);
			      if (omp_check_private (outer, decl, false))
				outer = NULL;
			      else if (n == NULL
				       || ((n->value & GOVD_DATA_SHARE_CLASS)
					   == 0))
				omp_add_variable (outer, decl,
						  GOVD_LASTPRIVATE
						  | GOVD_SEEN);
			      else
				outer = NULL;
			    }
			  if (outer && outer->outer_context
			      && (outer->outer_context->region_type
				  == ORT_COMBINED_TEAMS))
			    {
			      outer = outer->outer_context;
			      n = splay_tree_lookup (outer->variables,
						     (splay_tree_key)decl);
			      if (n == NULL
				  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
				omp_add_variable (outer, decl,
						  GOVD_SHARED | GOVD_SEEN);
			      else
				outer = NULL;
			    }
			  if (outer && outer->outer_context)
			    omp_notice_variable (outer->outer_context, decl,
						 true);
			}
		    }
		}

	      c = build_omp_clause (input_location,
				    lastprivate ? OMP_CLAUSE_LASTPRIVATE
						: OMP_CLAUSE_PRIVATE);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c;
	      omp_add_variable (gimplify_omp_ctxp, decl,
				(lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
				| GOVD_EXPLICIT | GOVD_SEEN);
	      c = NULL_TREE;
	    }
	}
      else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
	omp_notice_variable (gimplify_omp_ctxp, decl, true);
      else
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);

      /* If DECL is not a gimple register, create a temporary variable to act
	 as an iteration counter.  This is valid, since DECL cannot be
	 modified in the body of the loop.  Similarly for any iteration vars
	 in simd with collapse > 1 where the iterator vars must be
	 lastprivate.  */
      if (orig_for_stmt != for_stmt)
	var = decl;
      else if (!is_gimple_reg (decl)
	       || (ort == ORT_SIMD
		   && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Make sure omp_add_variable is not called on it prematurely.
	     We call it ourselves a few lines later.  */
	  gimplify_omp_ctxp = NULL;
	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	  gimplify_omp_ctxp = ctx;
	  TREE_OPERAND (t, 0) = var;

	  /* Copy the counter back into DECL at the start of the body.  */
	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));

	  if (ort == ORT_SIMD
	      && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
	    {
	      c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
	      OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
	      OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
	      OMP_CLAUSE_DECL (c2) = var;
	      OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c2;
	      omp_add_variable (gimplify_omp_ctxp, var,
				GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
	      if (c == NULL_TREE)
		{
		  c = c2;
		  c2 = NULL_TREE;
		}
	    }
	  else
	    omp_add_variable (gimplify_omp_ctxp, var,
			      GOVD_PRIVATE | GOVD_SEEN);
	}
      else
	var = decl;

      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue, false);
      ret = MIN (ret, tret);
      if (ret == GS_ERROR)
	return ret;

      /* Handle OMP_FOR_COND.  */
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (COMPARISON_CLASS_P (t));
      gcc_assert (TREE_OPERAND (t, 0) == decl);

      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue, false);
      ret = MIN (ret, tret);

      /* Handle OMP_FOR_INCR.  */
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      switch (TREE_CODE (t))
	{
	case PREINCREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  {
	    tree decl = TREE_OPERAND (t, 0);
	    /* c_omp_for_incr_canonicalize_ptr() should have been
	       called to massage things appropriately.  */
	    gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));

	    if (orig_for_stmt != for_stmt)
	      break;
	    /* Canonicalize ++ into var = var + 1.  */
	    t = build_int_cst (TREE_TYPE (decl), 1);
	    if (c)
	      OMP_CLAUSE_LINEAR_STEP (c) = t;
	    t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	    t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	    TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	    break;
	  }

	case PREDECREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  /* c_omp_for_incr_canonicalize_ptr() should have been
	     called to massage things appropriately.  */
	  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
	  if (orig_for_stmt != for_stmt)
	    break;
	  /* Canonicalize -- into var = var + (-1).  */
	  t = build_int_cst (TREE_TYPE (decl), -1);
	  if (c)
	    OMP_CLAUSE_LINEAR_STEP (c) = t;
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case MODIFY_EXPR:
	  gcc_assert (TREE_OPERAND (t, 0) == decl);
	  TREE_OPERAND (t, 0) = var;

	  t = TREE_OPERAND (t, 1);
	  switch (TREE_CODE (t))
	    {
	    case PLUS_EXPR:
	      /* Normalize "step + var" into "var + step".  */
	      if (TREE_OPERAND (t, 1) == decl)
		{
		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
		  TREE_OPERAND (t, 0) = var;
		  break;
		}

	      /* Fallthru.  */
	    case MINUS_EXPR:
	    case POINTER_PLUS_EXPR:
	      gcc_assert (TREE_OPERAND (t, 0) == decl);
	      TREE_OPERAND (t, 0) = var;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	  ret = MIN (ret, tret);
	  if (c)
	    {
	      /* Record the (sign-adjusted) step on the linear clause.  */
	      tree step = TREE_OPERAND (t, 1);
	      tree stept = TREE_TYPE (decl);
	      if (POINTER_TYPE_P (stept))
		stept = sizetype;
	      step = fold_convert (stept, step);
	      if (TREE_CODE (t) == MINUS_EXPR)
		step = fold_build1 (NEGATE_EXPR, stept, step);
	      OMP_CLAUSE_LINEAR_STEP (c) = step;
	      if (step != TREE_OPERAND (t, 1))
		{
		  tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
					&for_pre_body, NULL,
					is_gimple_val, fb_rvalue, false);
		  ret = MIN (ret, tret);
		}
	    }
	  break;

	default:
	  gcc_unreachable ();
	}

      if (c2)
	{
	  gcc_assert (c);
	  OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
	}

      /* When a temporary counter replaced DECL (or for collapsed loops),
	 fill in the lastprivate/linear copy-back sequences for DECL.  */
      if ((var != decl || collapse > 1) && orig_for_stmt == for_stmt)
	{
	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
	    if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		  && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
		 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		     && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
		     && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
		&& OMP_CLAUSE_DECL (c) == decl)
	      {
		if (is_doacross && (collapse == 1 || i >= collapse))
		  t = var;
		else
		  {
		    t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
		    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
		    gcc_assert (TREE_OPERAND (t, 0) == var);
		    t = TREE_OPERAND (t, 1);
		    gcc_assert (TREE_CODE (t) == PLUS_EXPR
				|| TREE_CODE (t) == MINUS_EXPR
				|| TREE_CODE (t) == POINTER_PLUS_EXPR);
		    gcc_assert (TREE_OPERAND (t, 0) == var);
		    t = build2 (TREE_CODE (t), TREE_TYPE (decl),
				is_doacross ? var : decl,
				TREE_OPERAND (t, 1));
		  }
		gimple_seq *seq;
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
		  seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
		else
		  seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
		gimplify_assign (decl, t, seq);
	      }
	}
    }

  BITMAP_FREE (has_decl_expr);

  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      push_gimplify_context ();
      /* Wrap the body into a BIND_EXPR so it gets its own binding scope.  */
      if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
	{
	  OMP_FOR_BODY (orig_for_stmt)
	    = build3 (BIND_EXPR, void_type_node, NULL,
		      OMP_FOR_BODY (orig_for_stmt), NULL);
	  TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
	}
    }

  gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
					 &for_body);

  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
    }

  /* For combined constructs, give each loop level a fresh private counter
     and redirect the increment to it.  */
  if (orig_for_stmt != for_stmt)
    for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
      {
	t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	decl = TREE_OPERAND (t, 0);
	struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
	  gimplify_omp_ctxp = ctx->outer_context;
	var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	gimplify_omp_ctxp = ctx;
	omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
	TREE_OPERAND (t, 0) = var;
	t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
	TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
	TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
      }

  gimplify_adjust_omp_clauses (pre_p, for_body,
			       &OMP_FOR_CLAUSES (orig_for_stmt),
			       TREE_CODE (orig_for_stmt));

  /* Map the tree construct kind onto the GIMPLE_OMP_FOR subkind.  */
  int kind;
  switch (TREE_CODE (orig_for_stmt))
    {
    case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
    case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
    case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
    case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
    case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
    case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
    case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
    default:
      gcc_unreachable ();
    }
  gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
			       for_pre_body);
  if (orig_for_stmt != for_stmt)
    gimple_omp_for_set_combined_p (gfor, true);
  if (gimplify_omp_ctxp
      && (gimplify_omp_ctxp->combined_loop
	  || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
	      && gimplify_omp_ctxp->outer_context
	      && gimplify_omp_ctxp->outer_context->combined_loop)))
    {
      gimple_omp_for_set_combined_into_p (gfor, true);
      if (gimplify_omp_ctxp->combined_loop)
	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
      else
	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
    }

  /* Transfer the per-level index/initial/final/incr data onto GFOR.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
    }

  /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
     constructs with GIMPLE_OMP_TASK sandwiched in between them.
     The outer taskloop stands for computing the number of iterations,
     counts for collapsed loops and holding taskloop specific clauses.
     The task construct stands for the effect of data sharing on the
     explicit task it creates and the inner taskloop stands for expansion
     of the static loop inside of the explicit task construct.  */
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
      tree task_clauses = NULL_TREE;
      tree c = *gfor_clauses_ptr;
      tree *gtask_clauses_ptr = &task_clauses;
      tree outer_for_clauses = NULL_TREE;
      tree *gforo_clauses_ptr = &outer_for_clauses;
      /* Distribute each clause onto the task, the outer taskloop, the
	 inner taskloop, or a combination thereof.  */
      for (; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  /* These clauses are allowed on task, move them there.  */
	  case OMP_CLAUSE_SHARED:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	  case OMP_CLAUSE_DEFAULT:
	  case OMP_CLAUSE_IF:
	  case OMP_CLAUSE_UNTIED:
	  case OMP_CLAUSE_FINAL:
	  case OMP_CLAUSE_MERGEABLE:
	  case OMP_CLAUSE_PRIORITY:
	    *gtask_clauses_ptr = c;
	    gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
	      {
		/* We want private on outer for and firstprivate
		   on task.  */
		*gtask_clauses_ptr
		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				      OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
		*gforo_clauses_ptr = c;
		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	      }
	    else
	      {
		*gtask_clauses_ptr = c;
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	      }
	    break;
	  /* These clauses go into outer taskloop clauses.  */
	  case OMP_CLAUSE_GRAINSIZE:
	  case OMP_CLAUSE_NUM_TASKS:
	  case OMP_CLAUSE_NOGROUP:
	    *gforo_clauses_ptr = c;
	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    break;
	  /* Taskloop clause we duplicate on both taskloops.  */
	  case OMP_CLAUSE_COLLAPSE:
	    *gfor_clauses_ptr = c;
	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    *gforo_clauses_ptr = copy_node (c);
	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
	    break;
	  /* For lastprivate, keep the clause on inner taskloop, and add
	     a shared clause on task.  If the same decl is also firstprivate,
	     add also firstprivate clause on the inner taskloop.  */
	  case OMP_CLAUSE_LASTPRIVATE:
	    if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
	      {
		/* For taskloop C++ lastprivate IVs, we want:
		   1) private on outer taskloop
		   2) firstprivate and shared on task
		   3) lastprivate on inner taskloop  */
		*gtask_clauses_ptr
		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				      OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
		OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
		*gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						       OMP_CLAUSE_PRIVATE);
		OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
		OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
		TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
	      }
	    *gfor_clauses_ptr = c;
	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    *gtask_clauses_ptr
	      = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
	    OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
	    if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	      OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
	    gtask_clauses_ptr
	      = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
	    break;
	  default:
	    gcc_unreachable ();
	  }
      *gfor_clauses_ptr = NULL_TREE;
      *gtask_clauses_ptr = NULL_TREE;
      *gforo_clauses_ptr = NULL_TREE;
      g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
      g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
				 NULL_TREE, NULL_TREE, NULL_TREE);
      gimple_omp_task_set_taskloop_p (g, true);
      g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
      gomp_for *gforo
	= gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
				gimple_omp_for_collapse (gfor),
				gimple_omp_for_pre_body (gfor));
      gimple_omp_for_set_pre_body (gfor, NULL);
      gimple_omp_for_set_combined_p (gforo, true);
      gimple_omp_for_set_combined_into_p (gfor, true);
      /* The outer taskloop gets unshared copies of the loop header.  */
      for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
	{
	  t = unshare_expr (gimple_omp_for_index (gfor, i));
	  gimple_omp_for_set_index (gforo, i, t);
	  t = unshare_expr (gimple_omp_for_initial (gfor, i));
	  gimple_omp_for_set_initial (gforo, i, t);
	  gimple_omp_for_set_cond (gforo, i,
				   gimple_omp_for_cond (gfor, i));
	  t = unshare_expr (gimple_omp_for_final (gfor, i));
	  gimple_omp_for_set_final (gforo, i, t);
	  t = unshare_expr (gimple_omp_for_incr (gfor, i));
	  gimple_omp_for_set_incr (gforo, i, t);
	}
      gimplify_seq_add_stmt (pre_p, gforo);
    }
  else
    gimplify_seq_add_stmt (pre_p, gfor);
  if (ret != GS_ALL_DONE)
    return GS_ERROR;
  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
9506 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
9507 of OMP_TARGET's body. */
9509 static tree
9510 find_omp_teams (tree *tp, int *walk_subtrees, void *)
9512 *walk_subtrees = 0;
9513 switch (TREE_CODE (*tp))
9515 case OMP_TEAMS:
9516 return *tp;
9517 case BIND_EXPR:
9518 case STATEMENT_LIST:
9519 *walk_subtrees = 1;
9520 break;
9521 default:
9522 break;
9524 return NULL_TREE;
9527 /* Helper function of optimize_target_teams, determine if the expression
9528 can be computed safely before the target construct on the host. */
9530 static tree
9531 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
9533 splay_tree_node n;
9535 if (TYPE_P (*tp))
9537 *walk_subtrees = 0;
9538 return NULL_TREE;
9540 switch (TREE_CODE (*tp))
9542 case VAR_DECL:
9543 case PARM_DECL:
9544 case RESULT_DECL:
9545 *walk_subtrees = 0;
9546 if (error_operand_p (*tp)
9547 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
9548 || DECL_HAS_VALUE_EXPR_P (*tp)
9549 || DECL_THREAD_LOCAL_P (*tp)
9550 || TREE_SIDE_EFFECTS (*tp)
9551 || TREE_THIS_VOLATILE (*tp))
9552 return *tp;
9553 if (is_global_var (*tp)
9554 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
9555 || lookup_attribute ("omp declare target link",
9556 DECL_ATTRIBUTES (*tp))))
9557 return *tp;
9558 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9559 (splay_tree_key) *tp);
9560 if (n == NULL)
9562 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
9563 return NULL_TREE;
9564 return *tp;
9566 else if (n->value & GOVD_LOCAL)
9567 return *tp;
9568 else if (n->value & GOVD_FIRSTPRIVATE)
9569 return NULL_TREE;
9570 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
9571 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
9572 return NULL_TREE;
9573 return *tp;
9574 case INTEGER_CST:
9575 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
9576 return *tp;
9577 return NULL_TREE;
9578 case TARGET_EXPR:
9579 if (TARGET_EXPR_INITIAL (*tp)
9580 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
9581 return *tp;
9582 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
9583 walk_subtrees, NULL);
9584 /* Allow some reasonable subset of integral arithmetics. */
9585 case PLUS_EXPR:
9586 case MINUS_EXPR:
9587 case MULT_EXPR:
9588 case TRUNC_DIV_EXPR:
9589 case CEIL_DIV_EXPR:
9590 case FLOOR_DIV_EXPR:
9591 case ROUND_DIV_EXPR:
9592 case TRUNC_MOD_EXPR:
9593 case CEIL_MOD_EXPR:
9594 case FLOOR_MOD_EXPR:
9595 case ROUND_MOD_EXPR:
9596 case RDIV_EXPR:
9597 case EXACT_DIV_EXPR:
9598 case MIN_EXPR:
9599 case MAX_EXPR:
9600 case LSHIFT_EXPR:
9601 case RSHIFT_EXPR:
9602 case BIT_IOR_EXPR:
9603 case BIT_XOR_EXPR:
9604 case BIT_AND_EXPR:
9605 case NEGATE_EXPR:
9606 case ABS_EXPR:
9607 case BIT_NOT_EXPR:
9608 case NON_LVALUE_EXPR:
9609 CASE_CONVERT:
9610 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
9611 return *tp;
9612 return NULL_TREE;
9613 /* And disallow anything else, except for comparisons. */
9614 default:
9615 if (COMPARISON_CLASS_P (*tp))
9616 return NULL_TREE;
9617 return *tp;
9621 /* Try to determine if the num_teams and/or thread_limit expressions
9622 can have their values determined already before entering the
9623 target construct.
9624 INTEGER_CSTs trivially are,
9625 integral decls that are firstprivate (explicitly or implicitly)
9626 or explicitly map(always, to:) or map(always, tofrom:) on the target
9627 region too, and expressions involving simple arithmetics on those
9628 too, function calls are not ok, dereferencing something neither etc.
9629 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
9630 EXPR based on what we find:
9631 0 stands for clause not specified at all, use implementation default
9632 -1 stands for value that can't be determined easily before entering
9633 the target construct.
9634 If teams construct is not present at all, use 1 for num_teams
9635 and 0 for thread_limit (only one team is involved, and the thread
9636 limit is implementation defined. */
9638 static void
9639 optimize_target_teams (tree target, gimple_seq *pre_p)
9641 tree body = OMP_BODY (target);
9642 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
9643 tree num_teams = integer_zero_node;
9644 tree thread_limit = integer_zero_node;
9645 location_t num_teams_loc = EXPR_LOCATION (target);
9646 location_t thread_limit_loc = EXPR_LOCATION (target);
9647 tree c, *p, expr;
9648 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
9650 if (teams == NULL_TREE)
9651 num_teams = integer_one_node;
9652 else
9653 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
9655 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
9657 p = &num_teams;
9658 num_teams_loc = OMP_CLAUSE_LOCATION (c);
9660 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
9662 p = &thread_limit;
9663 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
9665 else
9666 continue;
9667 expr = OMP_CLAUSE_OPERAND (c, 0);
9668 if (TREE_CODE (expr) == INTEGER_CST)
9670 *p = expr;
9671 continue;
9673 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
9675 *p = integer_minus_one_node;
9676 continue;
9678 *p = expr;
9679 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
9680 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
9681 == GS_ERROR)
9683 gimplify_omp_ctxp = target_ctx;
9684 *p = integer_minus_one_node;
9685 continue;
9687 gimplify_omp_ctxp = target_ctx;
9688 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
9689 OMP_CLAUSE_OPERAND (c, 0) = *p;
9691 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
9692 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
9693 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
9694 OMP_TARGET_CLAUSES (target) = c;
9695 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
9696 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
9697 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
9698 OMP_TARGET_CLAUSES (target) = c;
9701 /* Gimplify the gross structure of several OMP constructs. */
9703 static void
9704 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
9706 tree expr = *expr_p;
9707 gimple *stmt;
9708 gimple_seq body = NULL;
9709 enum omp_region_type ort;
9711 switch (TREE_CODE (expr))
9713 case OMP_SECTIONS:
9714 case OMP_SINGLE:
9715 ort = ORT_WORKSHARE;
9716 break;
9717 case OMP_TARGET:
9718 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
9719 break;
9720 case OACC_KERNELS:
9721 ort = ORT_ACC_KERNELS;
9722 break;
9723 case OACC_PARALLEL:
9724 ort = ORT_ACC_PARALLEL;
9725 break;
9726 case OACC_DATA:
9727 ort = ORT_ACC_DATA;
9728 break;
9729 case OMP_TARGET_DATA:
9730 ort = ORT_TARGET_DATA;
9731 break;
9732 case OMP_TEAMS:
9733 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
9734 break;
9735 case OACC_HOST_DATA:
9736 ort = ORT_ACC_HOST_DATA;
9737 break;
9738 default:
9739 gcc_unreachable ();
9741 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
9742 TREE_CODE (expr));
9743 if (TREE_CODE (expr) == OMP_TARGET)
9744 optimize_target_teams (expr, pre_p);
9745 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
9747 push_gimplify_context ();
9748 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
9749 if (gimple_code (g) == GIMPLE_BIND)
9750 pop_gimplify_context (g);
9751 else
9752 pop_gimplify_context (NULL);
9753 if ((ort & ORT_TARGET_DATA) != 0)
9755 enum built_in_function end_ix;
9756 switch (TREE_CODE (expr))
9758 case OACC_DATA:
9759 case OACC_HOST_DATA:
9760 end_ix = BUILT_IN_GOACC_DATA_END;
9761 break;
9762 case OMP_TARGET_DATA:
9763 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
9764 break;
9765 default:
9766 gcc_unreachable ();
9768 tree fn = builtin_decl_explicit (end_ix);
9769 g = gimple_build_call (fn, 0);
9770 gimple_seq cleanup = NULL;
9771 gimple_seq_add_stmt (&cleanup, g);
9772 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
9773 body = NULL;
9774 gimple_seq_add_stmt (&body, g);
9777 else
9778 gimplify_and_add (OMP_BODY (expr), &body);
9779 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
9780 TREE_CODE (expr));
9782 switch (TREE_CODE (expr))
9784 case OACC_DATA:
9785 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
9786 OMP_CLAUSES (expr));
9787 break;
9788 case OACC_KERNELS:
9789 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
9790 OMP_CLAUSES (expr));
9791 break;
9792 case OACC_HOST_DATA:
9793 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
9794 OMP_CLAUSES (expr));
9795 break;
9796 case OACC_PARALLEL:
9797 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
9798 OMP_CLAUSES (expr));
9799 break;
9800 case OMP_SECTIONS:
9801 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
9802 break;
9803 case OMP_SINGLE:
9804 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
9805 break;
9806 case OMP_TARGET:
9807 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
9808 OMP_CLAUSES (expr));
9809 break;
9810 case OMP_TARGET_DATA:
9811 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
9812 OMP_CLAUSES (expr));
9813 break;
9814 case OMP_TEAMS:
9815 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
9816 break;
9817 default:
9818 gcc_unreachable ();
9821 gimplify_seq_add_stmt (pre_p, stmt);
9822 *expr_p = NULL_TREE;
9825 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
9826 target update constructs. */
9828 static void
9829 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
9831 tree expr = *expr_p;
9832 int kind;
9833 gomp_target *stmt;
9834 enum omp_region_type ort = ORT_WORKSHARE;
9836 switch (TREE_CODE (expr))
9838 case OACC_ENTER_DATA:
9839 case OACC_EXIT_DATA:
9840 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
9841 ort = ORT_ACC;
9842 break;
9843 case OACC_UPDATE:
9844 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
9845 ort = ORT_ACC;
9846 break;
9847 case OMP_TARGET_UPDATE:
9848 kind = GF_OMP_TARGET_KIND_UPDATE;
9849 break;
9850 case OMP_TARGET_ENTER_DATA:
9851 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
9852 break;
9853 case OMP_TARGET_EXIT_DATA:
9854 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
9855 break;
9856 default:
9857 gcc_unreachable ();
9859 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
9860 ort, TREE_CODE (expr));
9861 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
9862 TREE_CODE (expr));
9863 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
9865 gimplify_seq_add_stmt (pre_p, stmt);
9866 *expr_p = NULL_TREE;
9869 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
9870 stabilized the lhs of the atomic operation as *ADDR. Return true if
9871 EXPR is this stabilized form. */
9873 static bool
9874 goa_lhs_expr_p (tree expr, tree addr)
9876 /* Also include casts to other type variants. The C front end is fond
9877 of adding these for e.g. volatile variables. This is like
9878 STRIP_TYPE_NOPS but includes the main variant lookup. */
9879 STRIP_USELESS_TYPE_CONVERSION (expr);
9881 if (TREE_CODE (expr) == INDIRECT_REF)
9883 expr = TREE_OPERAND (expr, 0);
9884 while (expr != addr
9885 && (CONVERT_EXPR_P (expr)
9886 || TREE_CODE (expr) == NON_LVALUE_EXPR)
9887 && TREE_CODE (expr) == TREE_CODE (addr)
9888 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
9890 expr = TREE_OPERAND (expr, 0);
9891 addr = TREE_OPERAND (addr, 0);
9893 if (expr == addr)
9894 return true;
9895 return (TREE_CODE (addr) == ADDR_EXPR
9896 && TREE_CODE (expr) == ADDR_EXPR
9897 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
9899 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
9900 return true;
9901 return false;
9904 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
9905 expression does not involve the lhs, evaluate it into a temporary.
9906 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
9907 or -1 if an error was encountered. */
9909 static int
9910 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
9911 tree lhs_var)
9913 tree expr = *expr_p;
9914 int saw_lhs;
9916 if (goa_lhs_expr_p (expr, lhs_addr))
9918 *expr_p = lhs_var;
9919 return 1;
9921 if (is_gimple_val (expr))
9922 return 0;
9924 saw_lhs = 0;
9925 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
9927 case tcc_binary:
9928 case tcc_comparison:
9929 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
9930 lhs_var);
9931 case tcc_unary:
9932 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
9933 lhs_var);
9934 break;
9935 case tcc_expression:
9936 switch (TREE_CODE (expr))
9938 case TRUTH_ANDIF_EXPR:
9939 case TRUTH_ORIF_EXPR:
9940 case TRUTH_AND_EXPR:
9941 case TRUTH_OR_EXPR:
9942 case TRUTH_XOR_EXPR:
9943 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
9944 lhs_addr, lhs_var);
9945 case TRUTH_NOT_EXPR:
9946 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
9947 lhs_addr, lhs_var);
9948 break;
9949 case COMPOUND_EXPR:
9950 /* Break out any preevaluations from cp_build_modify_expr. */
9951 for (; TREE_CODE (expr) == COMPOUND_EXPR;
9952 expr = TREE_OPERAND (expr, 1))
9953 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
9954 *expr_p = expr;
9955 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
9956 default:
9957 break;
9959 break;
9960 default:
9961 break;
9964 if (saw_lhs == 0)
9966 enum gimplify_status gs;
9967 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
9968 if (gs != GS_ALL_DONE)
9969 saw_lhs = -1;
9972 return saw_lhs;
9975 /* Gimplify an OMP_ATOMIC statement. */
9977 static enum gimplify_status
9978 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
9980 tree addr = TREE_OPERAND (*expr_p, 0);
9981 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
9982 ? NULL : TREE_OPERAND (*expr_p, 1);
9983 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
9984 tree tmp_load;
9985 gomp_atomic_load *loadstmt;
9986 gomp_atomic_store *storestmt;
9988 tmp_load = create_tmp_reg (type);
9989 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
9990 return GS_ERROR;
9992 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
9993 != GS_ALL_DONE)
9994 return GS_ERROR;
9996 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
9997 gimplify_seq_add_stmt (pre_p, loadstmt);
9998 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
9999 != GS_ALL_DONE)
10000 return GS_ERROR;
10002 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
10003 rhs = tmp_load;
10004 storestmt = gimple_build_omp_atomic_store (rhs);
10005 gimplify_seq_add_stmt (pre_p, storestmt);
10006 if (OMP_ATOMIC_SEQ_CST (*expr_p))
10008 gimple_omp_atomic_set_seq_cst (loadstmt);
10009 gimple_omp_atomic_set_seq_cst (storestmt);
10011 switch (TREE_CODE (*expr_p))
10013 case OMP_ATOMIC_READ:
10014 case OMP_ATOMIC_CAPTURE_OLD:
10015 *expr_p = tmp_load;
10016 gimple_omp_atomic_set_need_value (loadstmt);
10017 break;
10018 case OMP_ATOMIC_CAPTURE_NEW:
10019 *expr_p = rhs;
10020 gimple_omp_atomic_set_need_value (storestmt);
10021 break;
10022 default:
10023 *expr_p = NULL;
10024 break;
10027 return GS_ALL_DONE;
10030 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10031 body, and adding some EH bits. */
10033 static enum gimplify_status
10034 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10036 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
10037 gimple *body_stmt;
10038 gtransaction *trans_stmt;
10039 gimple_seq body = NULL;
10040 int subcode = 0;
10042 /* Wrap the transaction body in a BIND_EXPR so we have a context
10043 where to put decls for OMP. */
10044 if (TREE_CODE (tbody) != BIND_EXPR)
10046 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10047 TREE_SIDE_EFFECTS (bind) = 1;
10048 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10049 TRANSACTION_EXPR_BODY (expr) = bind;
10052 push_gimplify_context ();
10053 temp = voidify_wrapper_expr (*expr_p, NULL);
10055 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10056 pop_gimplify_context (body_stmt);
10058 trans_stmt = gimple_build_transaction (body);
10059 if (TRANSACTION_EXPR_OUTER (expr))
10060 subcode = GTMA_IS_OUTER;
10061 else if (TRANSACTION_EXPR_RELAXED (expr))
10062 subcode = GTMA_IS_RELAXED;
10063 gimple_transaction_set_subcode (trans_stmt, subcode);
10065 gimplify_seq_add_stmt (pre_p, trans_stmt);
10067 if (temp)
10069 *expr_p = temp;
10070 return GS_OK;
10073 *expr_p = NULL_TREE;
10074 return GS_ALL_DONE;
10077 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10078 is the OMP_BODY of the original EXPR (which has already been
10079 gimplified so it's not present in the EXPR).
10081 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
10083 static gimple *
10084 gimplify_omp_ordered (tree expr, gimple_seq body)
10086 tree c, decls;
10087 int failures = 0;
10088 unsigned int i;
10089 tree source_c = NULL_TREE;
10090 tree sink_c = NULL_TREE;
10092 if (gimplify_omp_ctxp)
10094 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10095 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10096 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
10097 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
10098 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
10100 error_at (OMP_CLAUSE_LOCATION (c),
10101 "%<ordered%> construct with %<depend%> clause must be "
10102 "closely nested inside a loop with %<ordered%> clause "
10103 "with a parameter");
10104 failures++;
10106 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10107 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
10109 bool fail = false;
10110 for (decls = OMP_CLAUSE_DECL (c), i = 0;
10111 decls && TREE_CODE (decls) == TREE_LIST;
10112 decls = TREE_CHAIN (decls), ++i)
10113 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
10114 continue;
10115 else if (TREE_VALUE (decls)
10116 != gimplify_omp_ctxp->loop_iter_var[2 * i])
10118 error_at (OMP_CLAUSE_LOCATION (c),
10119 "variable %qE is not an iteration "
10120 "of outermost loop %d, expected %qE",
10121 TREE_VALUE (decls), i + 1,
10122 gimplify_omp_ctxp->loop_iter_var[2 * i]);
10123 fail = true;
10124 failures++;
10126 else
10127 TREE_VALUE (decls)
10128 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
10129 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
10131 error_at (OMP_CLAUSE_LOCATION (c),
10132 "number of variables in %<depend(sink)%> "
10133 "clause does not match number of "
10134 "iteration variables");
10135 failures++;
10137 sink_c = c;
10139 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10140 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
10142 if (source_c)
10144 error_at (OMP_CLAUSE_LOCATION (c),
10145 "more than one %<depend(source)%> clause on an "
10146 "%<ordered%> construct");
10147 failures++;
10149 else
10150 source_c = c;
10153 if (source_c && sink_c)
10155 error_at (OMP_CLAUSE_LOCATION (source_c),
10156 "%<depend(source)%> clause specified together with "
10157 "%<depend(sink:)%> clauses on the same construct");
10158 failures++;
10161 if (failures)
10162 return gimple_build_nop ();
10163 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
10166 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
10167 expression produces a value to be used as an operand inside a GIMPLE
10168 statement, the value will be stored back in *EXPR_P. This value will
10169 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
10170 an SSA_NAME. The corresponding sequence of GIMPLE statements is
10171 emitted in PRE_P and POST_P.
10173 Additionally, this process may overwrite parts of the input
10174 expression during gimplification. Ideally, it should be
10175 possible to do non-destructive gimplification.
10177 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
10178 the expression needs to evaluate to a value to be used as
10179 an operand in a GIMPLE statement, this value will be stored in
10180 *EXPR_P on exit. This happens when the caller specifies one
10181 of fb_lvalue or fb_rvalue fallback flags.
10183 PRE_P will contain the sequence of GIMPLE statements corresponding
10184 to the evaluation of EXPR and all the side-effects that must
10185 be executed before the main expression. On exit, the last
10186 statement of PRE_P is the core statement being gimplified. For
10187 instance, when gimplifying 'if (++a)' the last statement in
10188 PRE_P will be 'if (t.1)' where t.1 is the result of
10189 pre-incrementing 'a'.
10191 POST_P will contain the sequence of GIMPLE statements corresponding
10192 to the evaluation of all the side-effects that must be executed
10193 after the main expression. If this is NULL, the post
10194 side-effects are stored at the end of PRE_P.
10196 The reason why the output is split in two is to handle post
10197 side-effects explicitly. In some cases, an expression may have
10198 inner and outer post side-effects which need to be emitted in
10199 an order different from the one given by the recursive
10200 traversal. For instance, for the expression (*p--)++ the post
10201 side-effects of '--' must actually occur *after* the post
10202 side-effects of '++'. However, gimplification will first visit
10203 the inner expression, so if a separate POST sequence was not
10204 used, the resulting sequence would be:
10206 1 t.1 = *p
10207 2 p = p - 1
10208 3 t.2 = t.1 + 1
10209 4 *p = t.2
10211 However, the post-decrement operation in line #2 must not be
10212 evaluated until after the store to *p at line #4, so the
10213 correct sequence should be:
10215 1 t.1 = *p
10216 2 t.2 = t.1 + 1
10217 3 *p = t.2
10218 4 p = p - 1
10220 So, by specifying a separate post queue, it is possible
10221 to emit the post side-effects in the correct order.
10222 If POST_P is NULL, an internal queue will be used. Before
10223 returning to the caller, the sequence POST_P is appended to
10224 the main output sequence PRE_P.
10226 GIMPLE_TEST_F points to a function that takes a tree T and
10227 returns nonzero if T is in the GIMPLE form requested by the
10228 caller. The GIMPLE predicates are in gimple.c.
10230 FALLBACK tells the function what sort of a temporary we want if
10231 gimplification cannot produce an expression that complies with
10232 GIMPLE_TEST_F.
10234 fb_none means that no temporary should be generated
10235 fb_rvalue means that an rvalue is OK to generate
10236 fb_lvalue means that an lvalue is OK to generate
10237 fb_either means that either is OK, but an lvalue is preferable.
10238 fb_mayfail means that gimplification may fail (in which case
10239 GS_ERROR will be returned)
10241 The return value is either GS_ERROR or GS_ALL_DONE, since this
10242 function iterates until EXPR is completely gimplified or an error
10243 occurs. */
10245 enum gimplify_status
10246 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
10247 bool (*gimple_test_f) (tree), fallback_t fallback)
10249 tree tmp;
10250 gimple_seq internal_pre = NULL;
10251 gimple_seq internal_post = NULL;
10252 tree save_expr;
10253 bool is_statement;
10254 location_t saved_location;
10255 enum gimplify_status ret;
10256 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
10258 save_expr = *expr_p;
10259 if (save_expr == NULL_TREE)
10260 return GS_ALL_DONE;
10262 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
10263 is_statement = gimple_test_f == is_gimple_stmt;
10264 if (is_statement)
10265 gcc_assert (pre_p);
10267 /* Consistency checks. */
10268 if (gimple_test_f == is_gimple_reg)
10269 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
10270 else if (gimple_test_f == is_gimple_val
10271 || gimple_test_f == is_gimple_call_addr
10272 || gimple_test_f == is_gimple_condexpr
10273 || gimple_test_f == is_gimple_mem_rhs
10274 || gimple_test_f == is_gimple_mem_rhs_or_call
10275 || gimple_test_f == is_gimple_reg_rhs
10276 || gimple_test_f == is_gimple_reg_rhs_or_call
10277 || gimple_test_f == is_gimple_asm_val
10278 || gimple_test_f == is_gimple_mem_ref_addr)
10279 gcc_assert (fallback & fb_rvalue);
10280 else if (gimple_test_f == is_gimple_min_lval
10281 || gimple_test_f == is_gimple_lvalue)
10282 gcc_assert (fallback & fb_lvalue);
10283 else if (gimple_test_f == is_gimple_addressable)
10284 gcc_assert (fallback & fb_either);
10285 else if (gimple_test_f == is_gimple_stmt)
10286 gcc_assert (fallback == fb_none);
10287 else
10289 /* We should have recognized the GIMPLE_TEST_F predicate to
10290 know what kind of fallback to use in case a temporary is
10291 needed to hold the value or address of *EXPR_P. */
10292 gcc_unreachable ();
10295 /* We used to check the predicate here and return immediately if it
10296 succeeds. This is wrong; the design is for gimplification to be
10297 idempotent, and for the predicates to only test for valid forms, not
10298 whether they are fully simplified. */
10299 if (pre_p == NULL)
10300 pre_p = &internal_pre;
10302 if (post_p == NULL)
10303 post_p = &internal_post;
10305 /* Remember the last statements added to PRE_P and POST_P. Every
10306 new statement added by the gimplification helpers needs to be
10307 annotated with location information. To centralize the
10308 responsibility, we remember the last statement that had been
10309 added to both queues before gimplifying *EXPR_P. If
10310 gimplification produces new statements in PRE_P and POST_P, those
10311 statements will be annotated with the same location information
10312 as *EXPR_P. */
10313 pre_last_gsi = gsi_last (*pre_p);
10314 post_last_gsi = gsi_last (*post_p);
10316 saved_location = input_location;
10317 if (save_expr != error_mark_node
10318 && EXPR_HAS_LOCATION (*expr_p))
10319 input_location = EXPR_LOCATION (*expr_p);
10321 /* Loop over the specific gimplifiers until the toplevel node
10322 remains the same. */
10325 /* Strip away as many useless type conversions as possible
10326 at the toplevel. */
10327 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
10329 /* Remember the expr. */
10330 save_expr = *expr_p;
10332 /* Die, die, die, my darling. */
10333 if (save_expr == error_mark_node
10334 || (TREE_TYPE (save_expr)
10335 && TREE_TYPE (save_expr) == error_mark_node))
10337 ret = GS_ERROR;
10338 break;
10341 /* Do any language-specific gimplification. */
10342 ret = ((enum gimplify_status)
10343 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
10344 if (ret == GS_OK)
10346 if (*expr_p == NULL_TREE)
10347 break;
10348 if (*expr_p != save_expr)
10349 continue;
10351 else if (ret != GS_UNHANDLED)
10352 break;
10354 /* Make sure that all the cases set 'ret' appropriately. */
10355 ret = GS_UNHANDLED;
10356 switch (TREE_CODE (*expr_p))
10358 /* First deal with the special cases. */
10360 case POSTINCREMENT_EXPR:
10361 case POSTDECREMENT_EXPR:
10362 case PREINCREMENT_EXPR:
10363 case PREDECREMENT_EXPR:
10364 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
10365 fallback != fb_none,
10366 TREE_TYPE (*expr_p));
10367 break;
10369 case VIEW_CONVERT_EXPR:
10370 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
10371 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
10373 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10374 post_p, is_gimple_val, fb_rvalue);
10375 recalculate_side_effects (*expr_p);
10376 break;
10378 /* Fallthru. */
10380 case ARRAY_REF:
10381 case ARRAY_RANGE_REF:
10382 case REALPART_EXPR:
10383 case IMAGPART_EXPR:
10384 case COMPONENT_REF:
10385 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
10386 fallback ? fallback : fb_rvalue);
10387 break;
10389 case COND_EXPR:
10390 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
10392 /* C99 code may assign to an array in a structure value of a
10393 conditional expression, and this has undefined behavior
10394 only on execution, so create a temporary if an lvalue is
10395 required. */
10396 if (fallback == fb_lvalue)
10398 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
10399 mark_addressable (*expr_p);
10400 ret = GS_OK;
10402 break;
10404 case CALL_EXPR:
10405 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
10407 /* C99 code may assign to an array in a structure returned
10408 from a function, and this has undefined behavior only on
10409 execution, so create a temporary if an lvalue is
10410 required. */
10411 if (fallback == fb_lvalue)
10413 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
10414 mark_addressable (*expr_p);
10415 ret = GS_OK;
10417 break;
10419 case TREE_LIST:
10420 gcc_unreachable ();
10422 case COMPOUND_EXPR:
10423 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
10424 break;
10426 case COMPOUND_LITERAL_EXPR:
10427 ret = gimplify_compound_literal_expr (expr_p, pre_p,
10428 gimple_test_f, fallback);
10429 break;
10431 case MODIFY_EXPR:
10432 case INIT_EXPR:
10433 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
10434 fallback != fb_none);
10435 break;
10437 case TRUTH_ANDIF_EXPR:
10438 case TRUTH_ORIF_EXPR:
10440 /* Preserve the original type of the expression and the
10441 source location of the outer expression. */
10442 tree org_type = TREE_TYPE (*expr_p);
10443 *expr_p = gimple_boolify (*expr_p);
10444 *expr_p = build3_loc (input_location, COND_EXPR,
10445 org_type, *expr_p,
10446 fold_convert_loc
10447 (input_location,
10448 org_type, boolean_true_node),
10449 fold_convert_loc
10450 (input_location,
10451 org_type, boolean_false_node));
10452 ret = GS_OK;
10453 break;
10456 case TRUTH_NOT_EXPR:
10458 tree type = TREE_TYPE (*expr_p);
10459 /* The parsers are careful to generate TRUTH_NOT_EXPR
10460 only with operands that are always zero or one.
10461 We do not fold here but handle the only interesting case
10462 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
10463 *expr_p = gimple_boolify (*expr_p);
10464 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
10465 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
10466 TREE_TYPE (*expr_p),
10467 TREE_OPERAND (*expr_p, 0));
10468 else
10469 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
10470 TREE_TYPE (*expr_p),
10471 TREE_OPERAND (*expr_p, 0),
10472 build_int_cst (TREE_TYPE (*expr_p), 1));
10473 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
10474 *expr_p = fold_convert_loc (input_location, type, *expr_p);
10475 ret = GS_OK;
10476 break;
10479 case ADDR_EXPR:
10480 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
10481 break;
10483 case ANNOTATE_EXPR:
10485 tree cond = TREE_OPERAND (*expr_p, 0);
10486 tree kind = TREE_OPERAND (*expr_p, 1);
10487 tree type = TREE_TYPE (cond);
10488 if (!INTEGRAL_TYPE_P (type))
10490 *expr_p = cond;
10491 ret = GS_OK;
10492 break;
10494 tree tmp = create_tmp_var (type);
10495 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
10496 gcall *call
10497 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
10498 gimple_call_set_lhs (call, tmp);
10499 gimplify_seq_add_stmt (pre_p, call);
10500 *expr_p = tmp;
10501 ret = GS_ALL_DONE;
10502 break;
10505 case VA_ARG_EXPR:
10506 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
10507 break;
10509 CASE_CONVERT:
10510 if (IS_EMPTY_STMT (*expr_p))
10512 ret = GS_ALL_DONE;
10513 break;
10516 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
10517 || fallback == fb_none)
10519 /* Just strip a conversion to void (or in void context) and
10520 try again. */
10521 *expr_p = TREE_OPERAND (*expr_p, 0);
10522 ret = GS_OK;
10523 break;
10526 ret = gimplify_conversion (expr_p);
10527 if (ret == GS_ERROR)
10528 break;
10529 if (*expr_p != save_expr)
10530 break;
10531 /* FALLTHRU */
10533 case FIX_TRUNC_EXPR:
10534 /* unary_expr: ... | '(' cast ')' val | ... */
10535 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10536 is_gimple_val, fb_rvalue);
10537 recalculate_side_effects (*expr_p);
10538 break;
10540 case INDIRECT_REF:
10542 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
10543 bool notrap = TREE_THIS_NOTRAP (*expr_p);
10544 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
10546 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
10547 if (*expr_p != save_expr)
10549 ret = GS_OK;
10550 break;
10553 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10554 is_gimple_reg, fb_rvalue);
10555 if (ret == GS_ERROR)
10556 break;
10558 recalculate_side_effects (*expr_p);
10559 *expr_p = fold_build2_loc (input_location, MEM_REF,
10560 TREE_TYPE (*expr_p),
10561 TREE_OPERAND (*expr_p, 0),
10562 build_int_cst (saved_ptr_type, 0));
10563 TREE_THIS_VOLATILE (*expr_p) = volatilep;
10564 TREE_THIS_NOTRAP (*expr_p) = notrap;
10565 ret = GS_OK;
10566 break;
10569 /* We arrive here through the various re-gimplification paths. */
10570 case MEM_REF:
10571 /* First try re-folding the whole thing. */
10572 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
10573 TREE_OPERAND (*expr_p, 0),
10574 TREE_OPERAND (*expr_p, 1));
10575 if (tmp)
10577 REF_REVERSE_STORAGE_ORDER (tmp)
10578 = REF_REVERSE_STORAGE_ORDER (*expr_p);
10579 *expr_p = tmp;
10580 recalculate_side_effects (*expr_p);
10581 ret = GS_OK;
10582 break;
10584 /* Avoid re-gimplifying the address operand if it is already
10585 in suitable form. Re-gimplifying would mark the address
10586 operand addressable. Always gimplify when not in SSA form
10587 as we still may have to gimplify decls with value-exprs. */
10588 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
10589 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
10591 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10592 is_gimple_mem_ref_addr, fb_rvalue);
10593 if (ret == GS_ERROR)
10594 break;
10596 recalculate_side_effects (*expr_p);
10597 ret = GS_ALL_DONE;
10598 break;
10600 /* Constants need not be gimplified. */
10601 case INTEGER_CST:
10602 case REAL_CST:
10603 case FIXED_CST:
10604 case STRING_CST:
10605 case COMPLEX_CST:
10606 case VECTOR_CST:
10607 /* Drop the overflow flag on constants, we do not want
10608 that in the GIMPLE IL. */
10609 if (TREE_OVERFLOW_P (*expr_p))
10610 *expr_p = drop_tree_overflow (*expr_p);
10611 ret = GS_ALL_DONE;
10612 break;
10614 case CONST_DECL:
10615 /* If we require an lvalue, such as for ADDR_EXPR, retain the
10616 CONST_DECL node. Otherwise the decl is replaceable by its
10617 value. */
10618 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
10619 if (fallback & fb_lvalue)
10620 ret = GS_ALL_DONE;
10621 else
10623 *expr_p = DECL_INITIAL (*expr_p);
10624 ret = GS_OK;
10626 break;
10628 case DECL_EXPR:
10629 ret = gimplify_decl_expr (expr_p, pre_p);
10630 break;
10632 case BIND_EXPR:
10633 ret = gimplify_bind_expr (expr_p, pre_p);
10634 break;
10636 case LOOP_EXPR:
10637 ret = gimplify_loop_expr (expr_p, pre_p);
10638 break;
10640 case SWITCH_EXPR:
10641 ret = gimplify_switch_expr (expr_p, pre_p);
10642 break;
10644 case EXIT_EXPR:
10645 ret = gimplify_exit_expr (expr_p);
10646 break;
10648 case GOTO_EXPR:
10649 /* If the target is not LABEL, then it is a computed jump
10650 and the target needs to be gimplified. */
10651 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
10653 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
10654 NULL, is_gimple_val, fb_rvalue);
10655 if (ret == GS_ERROR)
10656 break;
10658 gimplify_seq_add_stmt (pre_p,
10659 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
10660 ret = GS_ALL_DONE;
10661 break;
10663 case PREDICT_EXPR:
10664 gimplify_seq_add_stmt (pre_p,
10665 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
10666 PREDICT_EXPR_OUTCOME (*expr_p)));
10667 ret = GS_ALL_DONE;
10668 break;
10670 case LABEL_EXPR:
10671 ret = GS_ALL_DONE;
10672 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
10673 == current_function_decl);
10674 gimplify_seq_add_stmt (pre_p,
10675 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
10676 break;
10678 case CASE_LABEL_EXPR:
10679 ret = gimplify_case_label_expr (expr_p, pre_p);
10680 break;
10682 case RETURN_EXPR:
10683 ret = gimplify_return_expr (*expr_p, pre_p);
10684 break;
10686 case CONSTRUCTOR:
10687 /* Don't reduce this in place; let gimplify_init_constructor work its
10688 magic. But if we're just elaborating this for side effects, just
10689 gimplify any element that has side-effects. */
10690 if (fallback == fb_none)
10692 unsigned HOST_WIDE_INT ix;
10693 tree val;
10694 tree temp = NULL_TREE;
10695 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
10696 if (TREE_SIDE_EFFECTS (val))
10697 append_to_statement_list (val, &temp);
10699 *expr_p = temp;
10700 ret = temp ? GS_OK : GS_ALL_DONE;
10702 /* C99 code may assign to an array in a constructed
10703 structure or union, and this has undefined behavior only
10704 on execution, so create a temporary if an lvalue is
10705 required. */
10706 else if (fallback == fb_lvalue)
10708 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
10709 mark_addressable (*expr_p);
10710 ret = GS_OK;
10712 else
10713 ret = GS_ALL_DONE;
10714 break;
10716 /* The following are special cases that are not handled by the
10717 original GIMPLE grammar. */
10719 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
10720 eliminated. */
10721 case SAVE_EXPR:
10722 ret = gimplify_save_expr (expr_p, pre_p, post_p);
10723 break;
10725 case BIT_FIELD_REF:
10726 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10727 post_p, is_gimple_lvalue, fb_either);
10728 recalculate_side_effects (*expr_p);
10729 break;
10731 case TARGET_MEM_REF:
10733 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
10735 if (TMR_BASE (*expr_p))
10736 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
10737 post_p, is_gimple_mem_ref_addr, fb_either);
10738 if (TMR_INDEX (*expr_p))
10739 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
10740 post_p, is_gimple_val, fb_rvalue);
10741 if (TMR_INDEX2 (*expr_p))
10742 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
10743 post_p, is_gimple_val, fb_rvalue);
10744 /* TMR_STEP and TMR_OFFSET are always integer constants. */
10745 ret = MIN (r0, r1);
10747 break;
10749 case NON_LVALUE_EXPR:
10750 /* This should have been stripped above. */
10751 gcc_unreachable ();
10753 case ASM_EXPR:
10754 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
10755 break;
10757 case TRY_FINALLY_EXPR:
10758 case TRY_CATCH_EXPR:
10760 gimple_seq eval, cleanup;
10761 gtry *try_;
10763 /* Calls to destructors are generated automatically in FINALLY/CATCH
10764 block. They should have location as UNKNOWN_LOCATION. However,
10765 gimplify_call_expr will reset these call stmts to input_location
10766 if it finds stmt's location is unknown. To prevent resetting for
10767 destructors, we set the input_location to unknown.
10768 Note that this only affects the destructor calls in FINALLY/CATCH
10769 block, and will automatically reset to its original value by the
10770 end of gimplify_expr. */
10771 input_location = UNKNOWN_LOCATION;
10772 eval = cleanup = NULL;
10773 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
10774 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
10775 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
10776 if (gimple_seq_empty_p (cleanup))
10778 gimple_seq_add_seq (pre_p, eval);
10779 ret = GS_ALL_DONE;
10780 break;
10782 try_ = gimple_build_try (eval, cleanup,
10783 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
10784 ? GIMPLE_TRY_FINALLY
10785 : GIMPLE_TRY_CATCH);
10786 if (EXPR_HAS_LOCATION (save_expr))
10787 gimple_set_location (try_, EXPR_LOCATION (save_expr));
10788 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
10789 gimple_set_location (try_, saved_location);
10790 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
10791 gimple_try_set_catch_is_cleanup (try_,
10792 TRY_CATCH_IS_CLEANUP (*expr_p));
10793 gimplify_seq_add_stmt (pre_p, try_);
10794 ret = GS_ALL_DONE;
10795 break;
10798 case CLEANUP_POINT_EXPR:
10799 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
10800 break;
10802 case TARGET_EXPR:
10803 ret = gimplify_target_expr (expr_p, pre_p, post_p);
10804 break;
10806 case CATCH_EXPR:
10808 gimple *c;
10809 gimple_seq handler = NULL;
10810 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
10811 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
10812 gimplify_seq_add_stmt (pre_p, c);
10813 ret = GS_ALL_DONE;
10814 break;
10817 case EH_FILTER_EXPR:
10819 gimple *ehf;
10820 gimple_seq failure = NULL;
10822 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
10823 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
10824 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
10825 gimplify_seq_add_stmt (pre_p, ehf);
10826 ret = GS_ALL_DONE;
10827 break;
10830 case OBJ_TYPE_REF:
10832 enum gimplify_status r0, r1;
10833 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
10834 post_p, is_gimple_val, fb_rvalue);
10835 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
10836 post_p, is_gimple_val, fb_rvalue);
10837 TREE_SIDE_EFFECTS (*expr_p) = 0;
10838 ret = MIN (r0, r1);
10840 break;
10842 case LABEL_DECL:
10843 /* We get here when taking the address of a label. We mark
10844 the label as "forced"; meaning it can never be removed and
10845 it is a potential target for any computed goto. */
10846 FORCED_LABEL (*expr_p) = 1;
10847 ret = GS_ALL_DONE;
10848 break;
10850 case STATEMENT_LIST:
10851 ret = gimplify_statement_list (expr_p, pre_p);
10852 break;
10854 case WITH_SIZE_EXPR:
10856 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10857 post_p == &internal_post ? NULL : post_p,
10858 gimple_test_f, fallback);
10859 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
10860 is_gimple_val, fb_rvalue);
10861 ret = GS_ALL_DONE;
10863 break;
10865 case VAR_DECL:
10866 case PARM_DECL:
10867 ret = gimplify_var_or_parm_decl (expr_p);
10868 break;
10870 case RESULT_DECL:
10871 /* When within an OMP context, notice uses of variables. */
10872 if (gimplify_omp_ctxp)
10873 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
10874 ret = GS_ALL_DONE;
10875 break;
10877 case SSA_NAME:
10878 /* Allow callbacks into the gimplifier during optimization. */
10879 ret = GS_ALL_DONE;
10880 break;
10882 case OMP_PARALLEL:
10883 gimplify_omp_parallel (expr_p, pre_p);
10884 ret = GS_ALL_DONE;
10885 break;
10887 case OMP_TASK:
10888 gimplify_omp_task (expr_p, pre_p);
10889 ret = GS_ALL_DONE;
10890 break;
10892 case OMP_FOR:
10893 case OMP_SIMD:
10894 case CILK_SIMD:
10895 case CILK_FOR:
10896 case OMP_DISTRIBUTE:
10897 case OMP_TASKLOOP:
10898 case OACC_LOOP:
10899 ret = gimplify_omp_for (expr_p, pre_p);
10900 break;
10902 case OACC_CACHE:
10903 gimplify_oacc_cache (expr_p, pre_p);
10904 ret = GS_ALL_DONE;
10905 break;
10907 case OACC_DECLARE:
10908 gimplify_oacc_declare (expr_p, pre_p);
10909 ret = GS_ALL_DONE;
10910 break;
10912 case OACC_HOST_DATA:
10913 case OACC_DATA:
10914 case OACC_KERNELS:
10915 case OACC_PARALLEL:
10916 case OMP_SECTIONS:
10917 case OMP_SINGLE:
10918 case OMP_TARGET:
10919 case OMP_TARGET_DATA:
10920 case OMP_TEAMS:
10921 gimplify_omp_workshare (expr_p, pre_p);
10922 ret = GS_ALL_DONE;
10923 break;
10925 case OACC_ENTER_DATA:
10926 case OACC_EXIT_DATA:
10927 case OACC_UPDATE:
10928 case OMP_TARGET_UPDATE:
10929 case OMP_TARGET_ENTER_DATA:
10930 case OMP_TARGET_EXIT_DATA:
10931 gimplify_omp_target_update (expr_p, pre_p);
10932 ret = GS_ALL_DONE;
10933 break;
10935 case OMP_SECTION:
10936 case OMP_MASTER:
10937 case OMP_TASKGROUP:
10938 case OMP_ORDERED:
10939 case OMP_CRITICAL:
10941 gimple_seq body = NULL;
10942 gimple *g;
10944 gimplify_and_add (OMP_BODY (*expr_p), &body);
10945 switch (TREE_CODE (*expr_p))
10947 case OMP_SECTION:
10948 g = gimple_build_omp_section (body);
10949 break;
10950 case OMP_MASTER:
10951 g = gimple_build_omp_master (body);
10952 break;
10953 case OMP_TASKGROUP:
10955 gimple_seq cleanup = NULL;
10956 tree fn
10957 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
10958 g = gimple_build_call (fn, 0);
10959 gimple_seq_add_stmt (&cleanup, g);
10960 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10961 body = NULL;
10962 gimple_seq_add_stmt (&body, g);
10963 g = gimple_build_omp_taskgroup (body);
10965 break;
10966 case OMP_ORDERED:
10967 g = gimplify_omp_ordered (*expr_p, body);
10968 break;
10969 case OMP_CRITICAL:
10970 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
10971 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
10972 gimplify_adjust_omp_clauses (pre_p, body,
10973 &OMP_CRITICAL_CLAUSES (*expr_p),
10974 OMP_CRITICAL);
10975 g = gimple_build_omp_critical (body,
10976 OMP_CRITICAL_NAME (*expr_p),
10977 OMP_CRITICAL_CLAUSES (*expr_p));
10978 break;
10979 default:
10980 gcc_unreachable ();
10982 gimplify_seq_add_stmt (pre_p, g);
10983 ret = GS_ALL_DONE;
10984 break;
10987 case OMP_ATOMIC:
10988 case OMP_ATOMIC_READ:
10989 case OMP_ATOMIC_CAPTURE_OLD:
10990 case OMP_ATOMIC_CAPTURE_NEW:
10991 ret = gimplify_omp_atomic (expr_p, pre_p);
10992 break;
10994 case TRANSACTION_EXPR:
10995 ret = gimplify_transaction (expr_p, pre_p);
10996 break;
10998 case TRUTH_AND_EXPR:
10999 case TRUTH_OR_EXPR:
11000 case TRUTH_XOR_EXPR:
11002 tree orig_type = TREE_TYPE (*expr_p);
11003 tree new_type, xop0, xop1;
11004 *expr_p = gimple_boolify (*expr_p);
11005 new_type = TREE_TYPE (*expr_p);
11006 if (!useless_type_conversion_p (orig_type, new_type))
11008 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
11009 ret = GS_OK;
11010 break;
11013 /* Boolified binary truth expressions are semantically equivalent
11014 to bitwise binary expressions. Canonicalize them to the
11015 bitwise variant. */
11016 switch (TREE_CODE (*expr_p))
11018 case TRUTH_AND_EXPR:
11019 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11020 break;
11021 case TRUTH_OR_EXPR:
11022 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11023 break;
11024 case TRUTH_XOR_EXPR:
11025 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11026 break;
11027 default:
11028 break;
11030 /* Now make sure that operands have compatible type to
11031 expression's new_type. */
11032 xop0 = TREE_OPERAND (*expr_p, 0);
11033 xop1 = TREE_OPERAND (*expr_p, 1);
11034 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11035 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11036 new_type,
11037 xop0);
11038 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11039 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11040 new_type,
11041 xop1);
11042 /* Continue classified as tcc_binary. */
11043 goto expr_2;
11046 case VEC_COND_EXPR:
11048 enum gimplify_status r0, r1, r2;
11050 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11051 post_p, is_gimple_condexpr, fb_rvalue);
11052 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11053 post_p, is_gimple_val, fb_rvalue);
11054 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11055 post_p, is_gimple_val, fb_rvalue);
11057 ret = MIN (MIN (r0, r1), r2);
11058 recalculate_side_effects (*expr_p);
11060 break;
11062 case FMA_EXPR:
11063 case VEC_PERM_EXPR:
11064 /* Classified as tcc_expression. */
11065 goto expr_3;
11067 case BIT_INSERT_EXPR:
11068 /* Argument 3 is a constant. */
11069 goto expr_2;
11071 case POINTER_PLUS_EXPR:
11073 enum gimplify_status r0, r1;
11074 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11075 post_p, is_gimple_val, fb_rvalue);
11076 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11077 post_p, is_gimple_val, fb_rvalue);
11078 recalculate_side_effects (*expr_p);
11079 ret = MIN (r0, r1);
11080 break;
11083 case CILK_SYNC_STMT:
11085 if (!fn_contains_cilk_spawn_p (cfun))
11087 error_at (EXPR_LOCATION (*expr_p),
11088 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
11089 ret = GS_ERROR;
11091 else
11093 gimplify_cilk_sync (expr_p, pre_p);
11094 ret = GS_ALL_DONE;
11096 break;
11099 default:
11100 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
11102 case tcc_comparison:
11103 /* Handle comparison of objects of non scalar mode aggregates
11104 with a call to memcmp. It would be nice to only have to do
11105 this for variable-sized objects, but then we'd have to allow
11106 the same nest of reference nodes we allow for MODIFY_EXPR and
11107 that's too complex.
11109 Compare scalar mode aggregates as scalar mode values. Using
11110 memcmp for them would be very inefficient at best, and is
11111 plain wrong if bitfields are involved. */
11113 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
11115 /* Vector comparisons need no boolification. */
11116 if (TREE_CODE (type) == VECTOR_TYPE)
11117 goto expr_2;
11118 else if (!AGGREGATE_TYPE_P (type))
11120 tree org_type = TREE_TYPE (*expr_p);
11121 *expr_p = gimple_boolify (*expr_p);
11122 if (!useless_type_conversion_p (org_type,
11123 TREE_TYPE (*expr_p)))
11125 *expr_p = fold_convert_loc (input_location,
11126 org_type, *expr_p);
11127 ret = GS_OK;
11129 else
11130 goto expr_2;
11132 else if (TYPE_MODE (type) != BLKmode)
11133 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
11134 else
11135 ret = gimplify_variable_sized_compare (expr_p);
11137 break;
11140 /* If *EXPR_P does not need to be special-cased, handle it
11141 according to its class. */
11142 case tcc_unary:
11143 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11144 post_p, is_gimple_val, fb_rvalue);
11145 break;
11147 case tcc_binary:
11148 expr_2:
11150 enum gimplify_status r0, r1;
11152 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11153 post_p, is_gimple_val, fb_rvalue);
11154 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11155 post_p, is_gimple_val, fb_rvalue);
11157 ret = MIN (r0, r1);
11158 break;
11161 expr_3:
11163 enum gimplify_status r0, r1, r2;
11165 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11166 post_p, is_gimple_val, fb_rvalue);
11167 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11168 post_p, is_gimple_val, fb_rvalue);
11169 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11170 post_p, is_gimple_val, fb_rvalue);
11172 ret = MIN (MIN (r0, r1), r2);
11173 break;
11176 case tcc_declaration:
11177 case tcc_constant:
11178 ret = GS_ALL_DONE;
11179 goto dont_recalculate;
11181 default:
11182 gcc_unreachable ();
11185 recalculate_side_effects (*expr_p);
11187 dont_recalculate:
11188 break;
11191 gcc_assert (*expr_p || ret != GS_OK);
11193 while (ret == GS_OK);
11195 /* If we encountered an error_mark somewhere nested inside, either
11196 stub out the statement or propagate the error back out. */
11197 if (ret == GS_ERROR)
11199 if (is_statement)
11200 *expr_p = NULL;
11201 goto out;
11204 /* This was only valid as a return value from the langhook, which
11205 we handled. Make sure it doesn't escape from any other context. */
11206 gcc_assert (ret != GS_UNHANDLED);
11208 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
11210 /* We aren't looking for a value, and we don't have a valid
11211 statement. If it doesn't have side-effects, throw it away. */
11212 if (!TREE_SIDE_EFFECTS (*expr_p))
11213 *expr_p = NULL;
11214 else if (!TREE_THIS_VOLATILE (*expr_p))
11216 /* This is probably a _REF that contains something nested that
11217 has side effects. Recurse through the operands to find it. */
11218 enum tree_code code = TREE_CODE (*expr_p);
11220 switch (code)
11222 case COMPONENT_REF:
11223 case REALPART_EXPR:
11224 case IMAGPART_EXPR:
11225 case VIEW_CONVERT_EXPR:
11226 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11227 gimple_test_f, fallback);
11228 break;
11230 case ARRAY_REF:
11231 case ARRAY_RANGE_REF:
11232 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11233 gimple_test_f, fallback);
11234 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11235 gimple_test_f, fallback);
11236 break;
11238 default:
11239 /* Anything else with side-effects must be converted to
11240 a valid statement before we get here. */
11241 gcc_unreachable ();
11244 *expr_p = NULL;
11246 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
11247 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
11249 /* Historically, the compiler has treated a bare reference
11250 to a non-BLKmode volatile lvalue as forcing a load. */
11251 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
11253 /* Normally, we do not want to create a temporary for a
11254 TREE_ADDRESSABLE type because such a type should not be
11255 copied by bitwise-assignment. However, we make an
11256 exception here, as all we are doing here is ensuring that
11257 we read the bytes that make up the type. We use
11258 create_tmp_var_raw because create_tmp_var will abort when
11259 given a TREE_ADDRESSABLE type. */
11260 tree tmp = create_tmp_var_raw (type, "vol");
11261 gimple_add_tmp_var (tmp);
11262 gimplify_assign (tmp, *expr_p, pre_p);
11263 *expr_p = NULL;
11265 else
11266 /* We can't do anything useful with a volatile reference to
11267 an incomplete type, so just throw it away. Likewise for
11268 a BLKmode type, since any implicit inner load should
11269 already have been turned into an explicit one by the
11270 gimplification process. */
11271 *expr_p = NULL;
11274 /* If we are gimplifying at the statement level, we're done. Tack
11275 everything together and return. */
11276 if (fallback == fb_none || is_statement)
11278 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
11279 it out for GC to reclaim it. */
11280 *expr_p = NULL_TREE;
11282 if (!gimple_seq_empty_p (internal_pre)
11283 || !gimple_seq_empty_p (internal_post))
11285 gimplify_seq_add_seq (&internal_pre, internal_post);
11286 gimplify_seq_add_seq (pre_p, internal_pre);
11289 /* The result of gimplifying *EXPR_P is going to be the last few
11290 statements in *PRE_P and *POST_P. Add location information
11291 to all the statements that were added by the gimplification
11292 helpers. */
11293 if (!gimple_seq_empty_p (*pre_p))
11294 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
11296 if (!gimple_seq_empty_p (*post_p))
11297 annotate_all_with_location_after (*post_p, post_last_gsi,
11298 input_location);
11300 goto out;
11303 #ifdef ENABLE_GIMPLE_CHECKING
11304 if (*expr_p)
11306 enum tree_code code = TREE_CODE (*expr_p);
11307 /* These expressions should already be in gimple IR form. */
11308 gcc_assert (code != MODIFY_EXPR
11309 && code != ASM_EXPR
11310 && code != BIND_EXPR
11311 && code != CATCH_EXPR
11312 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
11313 && code != EH_FILTER_EXPR
11314 && code != GOTO_EXPR
11315 && code != LABEL_EXPR
11316 && code != LOOP_EXPR
11317 && code != SWITCH_EXPR
11318 && code != TRY_FINALLY_EXPR
11319 && code != OACC_PARALLEL
11320 && code != OACC_KERNELS
11321 && code != OACC_DATA
11322 && code != OACC_HOST_DATA
11323 && code != OACC_DECLARE
11324 && code != OACC_UPDATE
11325 && code != OACC_ENTER_DATA
11326 && code != OACC_EXIT_DATA
11327 && code != OACC_CACHE
11328 && code != OMP_CRITICAL
11329 && code != OMP_FOR
11330 && code != OACC_LOOP
11331 && code != OMP_MASTER
11332 && code != OMP_TASKGROUP
11333 && code != OMP_ORDERED
11334 && code != OMP_PARALLEL
11335 && code != OMP_SECTIONS
11336 && code != OMP_SECTION
11337 && code != OMP_SINGLE);
11339 #endif
11341 /* Otherwise we're gimplifying a subexpression, so the resulting
11342 value is interesting. If it's a valid operand that matches
11343 GIMPLE_TEST_F, we're done. Unless we are handling some
11344 post-effects internally; if that's the case, we need to copy into
11345 a temporary before adding the post-effects to POST_P. */
11346 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
11347 goto out;
11349 /* Otherwise, we need to create a new temporary for the gimplified
11350 expression. */
11352 /* We can't return an lvalue if we have an internal postqueue. The
11353 object the lvalue refers to would (probably) be modified by the
11354 postqueue; we need to copy the value out first, which means an
11355 rvalue. */
11356 if ((fallback & fb_lvalue)
11357 && gimple_seq_empty_p (internal_post)
11358 && is_gimple_addressable (*expr_p))
11360 /* An lvalue will do. Take the address of the expression, store it
11361 in a temporary, and replace the expression with an INDIRECT_REF of
11362 that temporary. */
11363 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
11364 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
11365 *expr_p = build_simple_mem_ref (tmp);
11367 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
11369 /* An rvalue will do. Assign the gimplified expression into a
11370 new temporary TMP and replace the original expression with
11371 TMP. First, make sure that the expression has a type so that
11372 it can be assigned into a temporary. */
11373 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
11374 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
11376 else
11378 #ifdef ENABLE_GIMPLE_CHECKING
11379 if (!(fallback & fb_mayfail))
11381 fprintf (stderr, "gimplification failed:\n");
11382 print_generic_expr (stderr, *expr_p, 0);
11383 debug_tree (*expr_p);
11384 internal_error ("gimplification failed");
11386 #endif
11387 gcc_assert (fallback & fb_mayfail);
11389 /* If this is an asm statement, and the user asked for the
11390 impossible, don't die. Fail and let gimplify_asm_expr
11391 issue an error. */
11392 ret = GS_ERROR;
11393 goto out;
11396 /* Make sure the temporary matches our predicate. */
11397 gcc_assert ((*gimple_test_f) (*expr_p));
11399 if (!gimple_seq_empty_p (internal_post))
11401 annotate_all_with_location (internal_post, input_location);
11402 gimplify_seq_add_seq (pre_p, internal_post);
11405 out:
11406 input_location = saved_location;
11407 return ret;
11410 /* Like gimplify_expr but make sure the gimplified result is not itself
11411 an SSA name (but a decl if it were). Temporaries required by
11412 evaluating *EXPR_P may be still SSA names. */
11414 static enum gimplify_status
11415 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11416 bool (*gimple_test_f) (tree), fallback_t fallback,
11417 bool allow_ssa)
11419 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
11420 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
11421 gimple_test_f, fallback);
11422 if (! allow_ssa
11423 && TREE_CODE (*expr_p) == SSA_NAME)
11425 tree name = *expr_p;
11426 if (was_ssa_name_p)
11427 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
11428 else
11430 /* Avoid the extra copy if possible. */
11431 *expr_p = create_tmp_reg (TREE_TYPE (name));
11432 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
11433 release_ssa_name (name);
11436 return ret;
11439 /* Look through TYPE for variable-sized objects and gimplify each such
11440 size that we find. Add to LIST_P any statements generated. */
11442 void
11443 gimplify_type_sizes (tree type, gimple_seq *list_p)
11445 tree field, t;
11447 if (type == NULL || type == error_mark_node)
11448 return;
11450 /* We first do the main variant, then copy into any other variants. */
11451 type = TYPE_MAIN_VARIANT (type);
11453 /* Avoid infinite recursion. */
11454 if (TYPE_SIZES_GIMPLIFIED (type))
11455 return;
11457 TYPE_SIZES_GIMPLIFIED (type) = 1;
11459 switch (TREE_CODE (type))
11461 case INTEGER_TYPE:
11462 case ENUMERAL_TYPE:
11463 case BOOLEAN_TYPE:
11464 case REAL_TYPE:
11465 case FIXED_POINT_TYPE:
11466 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
11467 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
11469 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
11471 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
11472 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
11474 break;
11476 case ARRAY_TYPE:
11477 /* These types may not have declarations, so handle them here. */
11478 gimplify_type_sizes (TREE_TYPE (type), list_p);
11479 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
11480 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
11481 with assigned stack slots, for -O1+ -g they should be tracked
11482 by VTA. */
11483 if (!(TYPE_NAME (type)
11484 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
11485 && DECL_IGNORED_P (TYPE_NAME (type)))
11486 && TYPE_DOMAIN (type)
11487 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
11489 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
11490 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11491 DECL_IGNORED_P (t) = 0;
11492 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
11493 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11494 DECL_IGNORED_P (t) = 0;
11496 break;
11498 case RECORD_TYPE:
11499 case UNION_TYPE:
11500 case QUAL_UNION_TYPE:
11501 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
11502 if (TREE_CODE (field) == FIELD_DECL)
11504 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
11505 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
11506 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
11507 gimplify_type_sizes (TREE_TYPE (field), list_p);
11509 break;
11511 case POINTER_TYPE:
11512 case REFERENCE_TYPE:
11513 /* We used to recurse on the pointed-to type here, which turned out to
11514 be incorrect because its definition might refer to variables not
11515 yet initialized at this point if a forward declaration is involved.
11517 It was actually useful for anonymous pointed-to types to ensure
11518 that the sizes evaluation dominates every possible later use of the
11519 values. Restricting to such types here would be safe since there
11520 is no possible forward declaration around, but would introduce an
11521 undesirable middle-end semantic to anonymity. We then defer to
11522 front-ends the responsibility of ensuring that the sizes are
11523 evaluated both early and late enough, e.g. by attaching artificial
11524 type declarations to the tree. */
11525 break;
11527 default:
11528 break;
11531 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
11532 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
11534 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
11536 TYPE_SIZE (t) = TYPE_SIZE (type);
11537 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
11538 TYPE_SIZES_GIMPLIFIED (t) = 1;
11542 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
11543 a size or position, has had all of its SAVE_EXPRs evaluated.
11544 We add any required statements to *STMT_P. */
11546 void
11547 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
11549 tree expr = *expr_p;
11551 /* We don't do anything if the value isn't there, is constant, or contains
11552 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
11553 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
11554 will want to replace it with a new variable, but that will cause problems
11555 if this type is from outside the function. It's OK to have that here. */
11556 if (is_gimple_sizepos (expr))
11557 return;
11559 *expr_p = unshare_expr (expr);
11561 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
11562 if the def vanishes. */
11563 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple *outer_stmt;
  gbind *outer_bind;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* A gimplification context must not already be active.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* FNDECL is nested if it has an origin; allocate the set used to track
     variables of variably-modified type referenced from nested functions
     (consumed during gimplification of the body and drained below).  */
  cgn = cgraph_node::get (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = new hash_set<tree>;

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_stmt (seq);
  if (!outer_stmt)
    {
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    outer_bind = as_a <gbind *> (outer_stmt);
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body is no longer needed once we have GIMPLE.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  /* Flush any nonlocal-VLA bookkeeping accumulated while gimplifying the
     body of this nested function.  */
  if (nonlocal_vlas)
    {
      if (nonlocal_vla_vars)
	{
	  /* tree-nested.c may later on call declare_vars (..., true);
	     which relies on BLOCK_VARS chain to be the tail of the
	     gimple_bind_vars chain.  Ensure we don't violate that
	     assumption.  */
	  if (gimple_bind_block (outer_bind)
	      == DECL_INITIAL (current_function_decl))
	    declare_vars (nonlocal_vla_vars, outer_bind, true);
	  else
	    BLOCK_VARS (DECL_INITIAL (current_function_decl))
	      = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
			 nonlocal_vla_vars);
	  nonlocal_vla_vars = NULL_TREE;
	}
      delete nonlocal_vlas;
      nonlocal_vlas = NULL;
    }

  /* Tear down any OMP context opened at the top of this function.  */
  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
11693 typedef char *char_p; /* For DEF_VEC_P. */
11695 /* Return whether we should exclude FNDECL from instrumentation. */
11697 static bool
11698 flag_instrument_functions_exclude_p (tree fndecl)
11700 vec<char_p> *v;
11702 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
11703 if (v && v->length () > 0)
11705 const char *name;
11706 int i;
11707 char *s;
11709 name = lang_hooks.decl_printable_name (fndecl, 0);
11710 FOR_EACH_VEC_ELT (*v, i, s)
11711 if (strstr (name, s) != NULL)
11712 return true;
11715 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
11716 if (v && v->length () > 0)
11718 const char *name;
11719 int i;
11720 char *s;
11722 name = DECL_SOURCE_FILE (fndecl);
11723 FOR_EACH_VEC_ELT (*v, i, s)
11724 if (strstr (name, s) != NULL)
11725 return true;
11728 return false;
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   The resulting sequence of GIMPLE statements is stored as the body of
   FNDECL (via gimple_set_body); nothing is returned.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  /* FNDECL must not have been gimplified already.  */
  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (parm)
	  && !needs_to_live_in_memory (parm))
	DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the return value.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  bind = gimplify_body (fndecl, true);

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple *tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gcall *call;

      /* Build the exit-hook sequence:
	   return_addr = __builtin_return_address (0);
	   __cyg_profile_func_exit (current_fn, return_addr);  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Build the entry-hook sequence, mirroring the exit hook with
	 __cyg_profile_func_enter, followed by the try/finally above.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
	 the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  /* For -fsanitize=thread, wrap the body once more so IFN_TSAN_FUNC_EXIT
     runs on every exit path, unless the function opted out via the
     no_sanitize_thread attribute.  */
  if ((flag_sanitize & SANITIZE_THREAD) != 0
      && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
	 the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_generic, fndecl);
}
11856 /* Return a dummy expression of type TYPE in order to keep going after an
11857 error. */
11859 static tree
11860 dummy_object (tree type)
11862 tree t = build_int_cst (build_pointer_type (type), 0);
11863 return build2 (MEM_REF, type, t, t);
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   *EXPR_P is the VA_ARG_EXPR; PRE_P receives statements that must run
   before it; POST_P is unused.  Returns GS_ERROR on an invalid va_list
   argument, GS_ALL_DONE for the promoted-type diagnostic path, and
   GS_OK after lowering to an IFN_VA_ARG internal call.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Static so the "pass X not Y" hint is given only once per run.  */
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      source_location xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* Lower to an IFN_VA_ARG internal call, carrying the requested type and
     the va_list type as dummy pointer "tags" for the later expansion.  */
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
11946 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
11948 DST/SRC are the destination and source respectively. You can pass
11949 ungimplified trees in DST or SRC, in which case they will be
11950 converted to a gimple operand if necessary.
11952 This function returns the newly created GIMPLE_ASSIGN tuple. */
11954 gimple *
11955 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
11957 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11958 gimplify_and_add (t, seq_p);
11959 ggc_free (t);
11960 return gimple_seq_last_stmt (*seq_p);
11963 inline hashval_t
11964 gimplify_hasher::hash (const elt_t *p)
11966 tree t = p->val;
11967 return iterative_hash_expr (t, 0);
11970 inline bool
11971 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
11973 tree t1 = p1->val;
11974 tree t2 = p2->val;
11975 enum tree_code code = TREE_CODE (t1);
11977 if (TREE_CODE (t2) != code
11978 || TREE_TYPE (t1) != TREE_TYPE (t2))
11979 return false;
11981 if (!operand_equal_p (t1, t2, 0))
11982 return false;
11984 /* Only allow them to compare equal if they also hash equal; otherwise
11985 results are nondeterminate, and we fail bootstrap comparison. */
11986 gcc_checking_assert (hash (p1) == hash (p2));
11988 return true;