2015-11-18 Alan Modra <amodra@gmail.com>
[official-gcc.git] / gcc / gimplify.c
bloba3ed3784f49484df8a8a6d989ee11cc1f6e250cd
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2015 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "gimple-predict.h"
32 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "tree-pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "alias.h"
38 #include "fold-const.h"
39 #include "calls.h"
40 #include "varasm.h"
41 #include "stmt.h"
42 #include "expr.h"
43 #include "gimple-fold.h"
44 #include "tree-eh.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "stor-layout.h"
48 #include "print-tree.h"
49 #include "tree-iterator.h"
50 #include "tree-inline.h"
51 #include "langhooks.h"
52 #include "tree-cfg.h"
53 #include "tree-ssa.h"
54 #include "omp-low.h"
55 #include "gimple-low.h"
56 #include "cilk.h"
57 #include "gomp-constants.h"
58 #include "tree-dump.h"
60 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
61 #include "builtins.h"
63 enum gimplify_omp_var_data
65 GOVD_SEEN = 1,
66 GOVD_EXPLICIT = 2,
67 GOVD_SHARED = 4,
68 GOVD_PRIVATE = 8,
69 GOVD_FIRSTPRIVATE = 16,
70 GOVD_LASTPRIVATE = 32,
71 GOVD_REDUCTION = 64,
72 GOVD_LOCAL = 128,
73 GOVD_MAP = 256,
74 GOVD_DEBUG_PRIVATE = 512,
75 GOVD_PRIVATE_OUTER_REF = 1024,
76 GOVD_LINEAR = 2048,
77 GOVD_ALIGNED = 4096,
79 /* Flag for GOVD_MAP: don't copy back. */
80 GOVD_MAP_TO_ONLY = 8192,
82 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
83 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,
85 GOVD_MAP_0LEN_ARRAY = 32768,
87 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
88 GOVD_MAP_ALWAYS_TO = 65536,
90 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
91 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
92 | GOVD_LOCAL)
96 enum omp_region_type
98 ORT_WORKSHARE = 0x00,
99 ORT_SIMD = 0x01,
101 ORT_PARALLEL = 0x02,
102 ORT_COMBINED_PARALLEL = 0x03,
104 ORT_TASK = 0x04,
105 ORT_UNTIED_TASK = 0x05,
107 ORT_TEAMS = 0x08,
108 ORT_COMBINED_TEAMS = 0x09,
110 /* Data region. */
111 ORT_TARGET_DATA = 0x10,
113 /* Data region with offloading. */
114 ORT_TARGET = 0x20,
115 ORT_COMBINED_TARGET = 0x21,
117 /* OpenACC variants. */
118 ORT_ACC = 0x40, /* A generic OpenACC region. */
119 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
120 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct */
121 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80, /* Kernels construct. */
123 /* Dummy OpenMP region, used to disable expansion of
124 DECL_VALUE_EXPRs in taskloop pre body. */
125 ORT_NONE = 0x100
128 /* Gimplify hashtable helper. */
130 struct gimplify_hasher : free_ptr_hash <elt_t>
132 static inline hashval_t hash (const elt_t *);
133 static inline bool equal (const elt_t *, const elt_t *);
136 struct gimplify_ctx
138 struct gimplify_ctx *prev_context;
140 vec<gbind *> bind_expr_stack;
141 tree temps;
142 gimple_seq conditional_cleanups;
143 tree exit_label;
144 tree return_temp;
146 vec<tree> case_labels;
147 /* The formal temporary table. Should this be persistent? */
148 hash_table<gimplify_hasher> *temp_htab;
150 int conditions;
151 bool save_stack;
152 bool into_ssa;
153 bool allow_rhs_cond_expr;
154 bool in_cleanup_point_expr;
157 struct gimplify_omp_ctx
159 struct gimplify_omp_ctx *outer_context;
160 splay_tree variables;
161 hash_set<tree> *privatized_types;
162 /* Iteration variables in an OMP_FOR. */
163 vec<tree> loop_iter_var;
164 location_t location;
165 enum omp_clause_default_kind default_kind;
166 enum omp_region_type region_type;
167 bool combined_loop;
168 bool distribute;
169 bool target_map_scalars_firstprivate;
170 bool target_map_pointers_as_0len_arrays;
171 bool target_firstprivatize_array_bases;
174 static struct gimplify_ctx *gimplify_ctxp;
175 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
177 /* Forward declaration. */
178 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
179 static hash_map<tree, tree> *oacc_declare_returns;
181 /* Shorter alias name for the above function for use in gimplify.c
182 only. */
184 static inline void
185 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
187 gimple_seq_add_stmt_without_update (seq_p, gs);
190 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
191 NULL, a new sequence is allocated. This function is
192 similar to gimple_seq_add_seq, but does not scan the operands.
193 During gimplification, we need to manipulate statement sequences
194 before the def/use vectors have been constructed. */
196 static void
197 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
199 gimple_stmt_iterator si;
201 if (src == NULL)
202 return;
204 si = gsi_last (*dst_p);
205 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
209 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
210 and popping gimplify contexts. */
212 static struct gimplify_ctx *ctx_pool = NULL;
214 /* Return a gimplify context struct from the pool. */
216 static inline struct gimplify_ctx *
217 ctx_alloc (void)
219 struct gimplify_ctx * c = ctx_pool;
221 if (c)
222 ctx_pool = c->prev_context;
223 else
224 c = XNEW (struct gimplify_ctx);
226 memset (c, '\0', sizeof (*c));
227 return c;
230 /* Put gimplify context C back into the pool. */
232 static inline void
233 ctx_free (struct gimplify_ctx *c)
235 c->prev_context = ctx_pool;
236 ctx_pool = c;
239 /* Free allocated ctx stack memory. */
241 void
242 free_gimplify_stack (void)
244 struct gimplify_ctx *c;
246 while ((c = ctx_pool))
248 ctx_pool = c->prev_context;
249 free (c);
254 /* Set up a context for the gimplifier. */
256 void
257 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
259 struct gimplify_ctx *c = ctx_alloc ();
261 c->prev_context = gimplify_ctxp;
262 gimplify_ctxp = c;
263 gimplify_ctxp->into_ssa = in_ssa;
264 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
267 /* Tear down a context for the gimplifier. If BODY is non-null, then
268 put the temporaries into the outer BIND_EXPR. Otherwise, put them
269 in the local_decls.
271 BODY is not a sequence, but the first tuple in a sequence. */
273 void
274 pop_gimplify_context (gimple *body)
276 struct gimplify_ctx *c = gimplify_ctxp;
278 gcc_assert (c
279 && (!c->bind_expr_stack.exists ()
280 || c->bind_expr_stack.is_empty ()));
281 c->bind_expr_stack.release ();
282 gimplify_ctxp = c->prev_context;
284 if (body)
285 declare_vars (c->temps, body, false);
286 else
287 record_vars (c->temps);
289 delete c->temp_htab;
290 c->temp_htab = NULL;
291 ctx_free (c);
294 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
296 static void
297 gimple_push_bind_expr (gbind *bind_stmt)
299 gimplify_ctxp->bind_expr_stack.reserve (8);
300 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
303 /* Pop the first element off the stack of bindings. */
305 static void
306 gimple_pop_bind_expr (void)
308 gimplify_ctxp->bind_expr_stack.pop ();
311 /* Return the first element of the stack of bindings. */
313 gbind *
314 gimple_current_bind_expr (void)
316 return gimplify_ctxp->bind_expr_stack.last ();
319 /* Return the stack of bindings created during gimplification. */
321 vec<gbind *>
322 gimple_bind_expr_stack (void)
324 return gimplify_ctxp->bind_expr_stack;
327 /* Return true iff there is a COND_EXPR between us and the innermost
328 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
330 static bool
331 gimple_conditional_context (void)
333 return gimplify_ctxp->conditions > 0;
336 /* Note that we've entered a COND_EXPR. */
338 static void
339 gimple_push_condition (void)
341 #ifdef ENABLE_GIMPLE_CHECKING
342 if (gimplify_ctxp->conditions == 0)
343 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
344 #endif
345 ++(gimplify_ctxp->conditions);
348 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
349 now, add any conditional cleanups we've seen to the prequeue. */
351 static void
352 gimple_pop_condition (gimple_seq *pre_p)
354 int conds = --(gimplify_ctxp->conditions);
356 gcc_assert (conds >= 0);
357 if (conds == 0)
359 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
360 gimplify_ctxp->conditional_cleanups = NULL;
364 /* A stable comparison routine for use with splay trees and DECLs. */
366 static int
367 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
369 tree a = (tree) xa;
370 tree b = (tree) xb;
372 return DECL_UID (a) - DECL_UID (b);
375 /* Create a new omp construct that deals with variable remapping. */
377 static struct gimplify_omp_ctx *
378 new_omp_context (enum omp_region_type region_type)
380 struct gimplify_omp_ctx *c;
382 c = XCNEW (struct gimplify_omp_ctx);
383 c->outer_context = gimplify_omp_ctxp;
384 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
385 c->privatized_types = new hash_set<tree>;
386 c->location = input_location;
387 c->region_type = region_type;
388 if ((region_type & ORT_TASK) == 0)
389 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
390 else
391 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
393 return c;
396 /* Destroy an omp construct that deals with variable remapping. */
398 static void
399 delete_omp_context (struct gimplify_omp_ctx *c)
401 splay_tree_delete (c->variables);
402 delete c->privatized_types;
403 c->loop_iter_var.release ();
404 XDELETE (c);
407 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
408 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
410 /* Both gimplify the statement T and append it to *SEQ_P. This function
411 behaves exactly as gimplify_stmt, but you don't have to pass T as a
412 reference. */
414 void
415 gimplify_and_add (tree t, gimple_seq *seq_p)
417 gimplify_stmt (&t, seq_p);
420 /* Gimplify statement T into sequence *SEQ_P, and return the first
421 tuple in the sequence of generated tuples for this statement.
422 Return NULL if gimplifying T produced no tuples. */
424 static gimple *
425 gimplify_and_return_first (tree t, gimple_seq *seq_p)
427 gimple_stmt_iterator last = gsi_last (*seq_p);
429 gimplify_and_add (t, seq_p);
431 if (!gsi_end_p (last))
433 gsi_next (&last);
434 return gsi_stmt (last);
436 else
437 return gimple_seq_first_stmt (*seq_p);
440 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
441 LHS, or for a call argument. */
443 static bool
444 is_gimple_mem_rhs (tree t)
446 /* If we're dealing with a renamable type, either source or dest must be
447 a renamed variable. */
448 if (is_gimple_reg_type (TREE_TYPE (t)))
449 return is_gimple_val (t);
450 else
451 return is_gimple_val (t) || is_gimple_lvalue (t);
454 /* Return true if T is a CALL_EXPR or an expression that can be
455 assigned to a temporary. Note that this predicate should only be
456 used during gimplification. See the rationale for this in
457 gimplify_modify_expr. */
459 static bool
460 is_gimple_reg_rhs_or_call (tree t)
462 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
463 || TREE_CODE (t) == CALL_EXPR);
466 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
467 this predicate should only be used during gimplification. See the
468 rationale for this in gimplify_modify_expr. */
470 static bool
471 is_gimple_mem_rhs_or_call (tree t)
473 /* If we're dealing with a renamable type, either source or dest must be
474 a renamed variable. */
475 if (is_gimple_reg_type (TREE_TYPE (t)))
476 return is_gimple_val (t);
477 else
478 return (is_gimple_val (t) || is_gimple_lvalue (t)
479 || TREE_CODE (t) == CALL_EXPR);
482 /* Create a temporary with a name derived from VAL. Subroutine of
483 lookup_tmp_var; nobody else should call this function. */
485 static inline tree
486 create_tmp_from_val (tree val)
488 /* Drop all qualifiers and address-space information from the value type. */
489 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
490 tree var = create_tmp_var (type, get_name (val));
491 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
492 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
493 DECL_GIMPLE_REG_P (var) = 1;
494 return var;
497 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
498 an existing expression temporary. */
500 static tree
501 lookup_tmp_var (tree val, bool is_formal)
503 tree ret;
505 /* If not optimizing, never really reuse a temporary. local-alloc
506 won't allocate any variable that is used in more than one basic
507 block, which means it will go into memory, causing much extra
508 work in reload and final and poorer code generation, outweighing
509 the extra memory allocation here. */
510 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
511 ret = create_tmp_from_val (val);
512 else
514 elt_t elt, *elt_p;
515 elt_t **slot;
517 elt.val = val;
518 if (!gimplify_ctxp->temp_htab)
519 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
520 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
521 if (*slot == NULL)
523 elt_p = XNEW (elt_t);
524 elt_p->val = val;
525 elt_p->temp = ret = create_tmp_from_val (val);
526 *slot = elt_p;
528 else
530 elt_p = *slot;
531 ret = elt_p->temp;
535 return ret;
538 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
540 static tree
541 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
542 bool is_formal)
544 tree t, mod;
546 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
547 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
548 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
549 fb_rvalue);
551 if (gimplify_ctxp->into_ssa
552 && is_gimple_reg_type (TREE_TYPE (val)))
553 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
554 else
555 t = lookup_tmp_var (val, is_formal);
557 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
559 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
561 /* gimplify_modify_expr might want to reduce this further. */
562 gimplify_and_add (mod, pre_p);
563 ggc_free (mod);
565 return t;
568 /* Return a formal temporary variable initialized with VAL. PRE_P is as
569 in gimplify_expr. Only use this function if:
571 1) The value of the unfactored expression represented by VAL will not
572 change between the initialization and use of the temporary, and
573 2) The temporary will not be otherwise modified.
575 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
576 and #2 means it is inappropriate for && temps.
578 For other cases, use get_initialized_tmp_var instead. */
580 tree
581 get_formal_tmp_var (tree val, gimple_seq *pre_p)
583 return internal_get_tmp_var (val, pre_p, NULL, true);
586 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
587 are as in gimplify_expr. */
589 tree
590 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
592 return internal_get_tmp_var (val, pre_p, post_p, false);
595 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
596 generate debug info for them; otherwise don't. */
598 void
599 declare_vars (tree vars, gimple *gs, bool debug_info)
601 tree last = vars;
602 if (last)
604 tree temps, block;
606 gbind *scope = as_a <gbind *> (gs);
608 temps = nreverse (last);
610 block = gimple_bind_block (scope);
611 gcc_assert (!block || TREE_CODE (block) == BLOCK);
612 if (!block || !debug_info)
614 DECL_CHAIN (last) = gimple_bind_vars (scope);
615 gimple_bind_set_vars (scope, temps);
617 else
619 /* We need to attach the nodes both to the BIND_EXPR and to its
620 associated BLOCK for debugging purposes. The key point here
621 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
622 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
623 if (BLOCK_VARS (block))
624 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
625 else
627 gimple_bind_set_vars (scope,
628 chainon (gimple_bind_vars (scope), temps));
629 BLOCK_VARS (block) = temps;
635 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
636 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
637 no such upper bound can be obtained. */
639 static void
640 force_constant_size (tree var)
642 /* The only attempt we make is by querying the maximum size of objects
643 of the variable's type. */
645 HOST_WIDE_INT max_size;
647 gcc_assert (TREE_CODE (var) == VAR_DECL);
649 max_size = max_int_size_in_bytes (TREE_TYPE (var));
651 gcc_assert (max_size >= 0);
653 DECL_SIZE_UNIT (var)
654 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
655 DECL_SIZE (var)
656 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
659 /* Push the temporary variable TMP into the current binding. */
661 void
662 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
664 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
666 /* Later processing assumes that the object size is constant, which might
667 not be true at this point. Force the use of a constant upper bound in
668 this case. */
669 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
670 force_constant_size (tmp);
672 DECL_CONTEXT (tmp) = fn->decl;
673 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
675 record_vars_into (tmp, fn->decl);
678 /* Push the temporary variable TMP into the current binding. */
680 void
681 gimple_add_tmp_var (tree tmp)
683 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
685 /* Later processing assumes that the object size is constant, which might
686 not be true at this point. Force the use of a constant upper bound in
687 this case. */
688 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
689 force_constant_size (tmp);
691 DECL_CONTEXT (tmp) = current_function_decl;
692 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
694 if (gimplify_ctxp)
696 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
697 gimplify_ctxp->temps = tmp;
699 /* Mark temporaries local within the nearest enclosing parallel. */
700 if (gimplify_omp_ctxp)
702 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
703 while (ctx
704 && (ctx->region_type == ORT_WORKSHARE
705 || ctx->region_type == ORT_SIMD
706 || ctx->region_type == ORT_ACC))
707 ctx = ctx->outer_context;
708 if (ctx)
709 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
712 else if (cfun)
713 record_vars (tmp);
714 else
716 gimple_seq body_seq;
718 /* This case is for nested functions. We need to expose the locals
719 they create. */
720 body_seq = gimple_body (current_function_decl);
721 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
727 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
728 nodes that are referenced more than once in GENERIC functions. This is
729 necessary because gimplification (translation into GIMPLE) is performed
730 by modifying tree nodes in-place, so gimplication of a shared node in a
731 first context could generate an invalid GIMPLE form in a second context.
733 This is achieved with a simple mark/copy/unmark algorithm that walks the
734 GENERIC representation top-down, marks nodes with TREE_VISITED the first
735 time it encounters them, duplicates them if they already have TREE_VISITED
736 set, and finally removes the TREE_VISITED marks it has set.
738 The algorithm works only at the function level, i.e. it generates a GENERIC
739 representation of a function with no nodes shared within the function when
740 passed a GENERIC function (except for nodes that are allowed to be shared).
742 At the global level, it is also necessary to unshare tree nodes that are
743 referenced in more than one function, for the same aforementioned reason.
744 This requires some cooperation from the front-end. There are 2 strategies:
746 1. Manual unsharing. The front-end needs to call unshare_expr on every
747 expression that might end up being shared across functions.
749 2. Deep unsharing. This is an extension of regular unsharing. Instead
750 of calling unshare_expr on expressions that might be shared across
751 functions, the front-end pre-marks them with TREE_VISITED. This will
752 ensure that they are unshared on the first reference within functions
753 when the regular unsharing algorithm runs. The counterpart is that
754 this algorithm must look deeper than for manual unsharing, which is
755 specified by LANG_HOOKS_DEEP_UNSHARING.
757 If there are only few specific cases of node sharing across functions, it is
758 probably easier for a front-end to unshare the expressions manually. On the
759 contrary, if the expressions generated at the global level are as widespread
760 as expressions generated within functions, deep unsharing is very likely the
761 way to go. */
763 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
764 These nodes model computations that must be done once. If we were to
765 unshare something like SAVE_EXPR(i++), the gimplification process would
766 create wrong code. However, if DATA is non-null, it must hold a pointer
767 set that is used to unshare the subtrees of these nodes. */
769 static tree
770 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
772 tree t = *tp;
773 enum tree_code code = TREE_CODE (t);
775 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
776 copy their subtrees if we can make sure to do it only once. */
777 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
779 if (data && !((hash_set<tree> *)data)->add (t))
781 else
782 *walk_subtrees = 0;
785 /* Stop at types, decls, constants like copy_tree_r. */
786 else if (TREE_CODE_CLASS (code) == tcc_type
787 || TREE_CODE_CLASS (code) == tcc_declaration
788 || TREE_CODE_CLASS (code) == tcc_constant
789 /* We can't do anything sensible with a BLOCK used as an
790 expression, but we also can't just die when we see it
791 because of non-expression uses. So we avert our eyes
792 and cross our fingers. Silly Java. */
793 || code == BLOCK)
794 *walk_subtrees = 0;
796 /* Cope with the statement expression extension. */
797 else if (code == STATEMENT_LIST)
800 /* Leave the bulk of the work to copy_tree_r itself. */
801 else
802 copy_tree_r (tp, walk_subtrees, NULL);
804 return NULL_TREE;
807 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
808 If *TP has been visited already, then *TP is deeply copied by calling
809 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
811 static tree
812 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
814 tree t = *tp;
815 enum tree_code code = TREE_CODE (t);
817 /* Skip types, decls, and constants. But we do want to look at their
818 types and the bounds of types. Mark them as visited so we properly
819 unmark their subtrees on the unmark pass. If we've already seen them,
820 don't look down further. */
821 if (TREE_CODE_CLASS (code) == tcc_type
822 || TREE_CODE_CLASS (code) == tcc_declaration
823 || TREE_CODE_CLASS (code) == tcc_constant)
825 if (TREE_VISITED (t))
826 *walk_subtrees = 0;
827 else
828 TREE_VISITED (t) = 1;
831 /* If this node has been visited already, unshare it and don't look
832 any deeper. */
833 else if (TREE_VISITED (t))
835 walk_tree (tp, mostly_copy_tree_r, data, NULL);
836 *walk_subtrees = 0;
839 /* Otherwise, mark the node as visited and keep looking. */
840 else
841 TREE_VISITED (t) = 1;
843 return NULL_TREE;
846 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
847 copy_if_shared_r callback unmodified. */
849 static inline void
850 copy_if_shared (tree *tp, void *data)
852 walk_tree (tp, copy_if_shared_r, data, NULL);
855 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
856 any nested functions. */
858 static void
859 unshare_body (tree fndecl)
861 struct cgraph_node *cgn = cgraph_node::get (fndecl);
862 /* If the language requires deep unsharing, we need a pointer set to make
863 sure we don't repeatedly unshare subtrees of unshareable nodes. */
864 hash_set<tree> *visited
865 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
867 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
868 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
869 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
871 delete visited;
873 if (cgn)
874 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
875 unshare_body (cgn->decl);
878 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
879 Subtrees are walked until the first unvisited node is encountered. */
881 static tree
882 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
884 tree t = *tp;
886 /* If this node has been visited, unmark it and keep looking. */
887 if (TREE_VISITED (t))
888 TREE_VISITED (t) = 0;
890 /* Otherwise, don't look any deeper. */
891 else
892 *walk_subtrees = 0;
894 return NULL_TREE;
897 /* Unmark the visited trees rooted at *TP. */
899 static inline void
900 unmark_visited (tree *tp)
902 walk_tree (tp, unmark_visited_r, NULL, NULL);
905 /* Likewise, but mark all trees as not visited. */
907 static void
908 unvisit_body (tree fndecl)
910 struct cgraph_node *cgn = cgraph_node::get (fndecl);
912 unmark_visited (&DECL_SAVED_TREE (fndecl));
913 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
914 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
916 if (cgn)
917 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
918 unvisit_body (cgn->decl);
921 /* Unconditionally make an unshared copy of EXPR. This is used when using
922 stored expressions which span multiple functions, such as BINFO_VTABLE,
923 as the normal unsharing process can't tell that they're shared. */
925 tree
926 unshare_expr (tree expr)
928 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
929 return expr;
932 /* Worker for unshare_expr_without_location. */
934 static tree
935 prune_expr_location (tree *tp, int *walk_subtrees, void *)
937 if (EXPR_P (*tp))
938 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
939 else
940 *walk_subtrees = 0;
941 return NULL_TREE;
944 /* Similar to unshare_expr but also prune all expression locations
945 from EXPR. */
947 tree
948 unshare_expr_without_location (tree expr)
950 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
951 if (EXPR_P (expr))
952 walk_tree (&expr, prune_expr_location, NULL, NULL);
953 return expr;
956 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
957 contain statements and have a value. Assign its value to a temporary
958 and give it void_type_node. Return the temporary, or NULL_TREE if
959 WRAPPER was already void. */
961 tree
962 voidify_wrapper_expr (tree wrapper, tree temp)
964 tree type = TREE_TYPE (wrapper);
965 if (type && !VOID_TYPE_P (type))
967 tree *p;
969 /* Set p to point to the body of the wrapper. Loop until we find
970 something that isn't a wrapper. */
971 for (p = &wrapper; p && *p; )
973 switch (TREE_CODE (*p))
975 case BIND_EXPR:
976 TREE_SIDE_EFFECTS (*p) = 1;
977 TREE_TYPE (*p) = void_type_node;
978 /* For a BIND_EXPR, the body is operand 1. */
979 p = &BIND_EXPR_BODY (*p);
980 break;
982 case CLEANUP_POINT_EXPR:
983 case TRY_FINALLY_EXPR:
984 case TRY_CATCH_EXPR:
985 TREE_SIDE_EFFECTS (*p) = 1;
986 TREE_TYPE (*p) = void_type_node;
987 p = &TREE_OPERAND (*p, 0);
988 break;
990 case STATEMENT_LIST:
992 tree_stmt_iterator i = tsi_last (*p);
993 TREE_SIDE_EFFECTS (*p) = 1;
994 TREE_TYPE (*p) = void_type_node;
995 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
997 break;
999 case COMPOUND_EXPR:
1000 /* Advance to the last statement. Set all container types to
1001 void. */
1002 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1004 TREE_SIDE_EFFECTS (*p) = 1;
1005 TREE_TYPE (*p) = void_type_node;
1007 break;
1009 case TRANSACTION_EXPR:
1010 TREE_SIDE_EFFECTS (*p) = 1;
1011 TREE_TYPE (*p) = void_type_node;
1012 p = &TRANSACTION_EXPR_BODY (*p);
1013 break;
1015 default:
1016 /* Assume that any tree upon which voidify_wrapper_expr is
1017 directly called is a wrapper, and that its body is op0. */
1018 if (p == &wrapper)
1020 TREE_SIDE_EFFECTS (*p) = 1;
1021 TREE_TYPE (*p) = void_type_node;
1022 p = &TREE_OPERAND (*p, 0);
1023 break;
1025 goto out;
1029 out:
1030 if (p == NULL || IS_EMPTY_STMT (*p))
1031 temp = NULL_TREE;
1032 else if (temp)
1034 /* The wrapper is on the RHS of an assignment that we're pushing
1035 down. */
1036 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1037 || TREE_CODE (temp) == MODIFY_EXPR);
1038 TREE_OPERAND (temp, 1) = *p;
1039 *p = temp;
1041 else
1043 temp = create_tmp_var (type, "retval");
1044 *p = build2 (INIT_EXPR, type, temp, *p);
1047 return temp;
1050 return NULL_TREE;
1053 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1054 a temporary through which they communicate. */
1056 static void
1057 build_stack_save_restore (gcall **save, gcall **restore)
1059 tree tmp_var;
1061 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1062 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1063 gimple_call_set_lhs (*save, tmp_var);
1065 *restore
1066 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1067 1, tmp_var);
1070 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1072 static enum gimplify_status
1073 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1075 tree bind_expr = *expr_p;
1076 bool old_save_stack = gimplify_ctxp->save_stack;
1077 tree t;
1078 gbind *bind_stmt;
1079 gimple_seq body, cleanup;
1080 gcall *stack_save;
1081 location_t start_locus = 0, end_locus = 0;
1082 tree ret_clauses = NULL;
1084 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1086 /* Mark variables seen in this bind expr. */
1087 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1089 if (TREE_CODE (t) == VAR_DECL)
1091 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1093 /* Mark variable as local. */
1094 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1095 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1096 || splay_tree_lookup (ctx->variables,
1097 (splay_tree_key) t) == NULL))
1099 if (ctx->region_type == ORT_SIMD
1100 && TREE_ADDRESSABLE (t)
1101 && !TREE_STATIC (t))
1102 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1103 else
1104 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1107 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1109 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1110 cfun->has_local_explicit_reg_vars = true;
1113 /* Preliminarily mark non-addressed complex variables as eligible
1114 for promotion to gimple registers. We'll transform their uses
1115 as we find them. */
1116 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1117 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1118 && !TREE_THIS_VOLATILE (t)
1119 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1120 && !needs_to_live_in_memory (t))
1121 DECL_GIMPLE_REG_P (t) = 1;
1124 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1125 BIND_EXPR_BLOCK (bind_expr));
1126 gimple_push_bind_expr (bind_stmt);
1128 gimplify_ctxp->save_stack = false;
1130 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1131 body = NULL;
1132 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1133 gimple_bind_set_body (bind_stmt, body);
1135 /* Source location wise, the cleanup code (stack_restore and clobbers)
1136 belongs to the end of the block, so propagate what we have. The
1137 stack_save operation belongs to the beginning of block, which we can
1138 infer from the bind_expr directly if the block has no explicit
1139 assignment. */
1140 if (BIND_EXPR_BLOCK (bind_expr))
1142 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1143 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1145 if (start_locus == 0)
1146 start_locus = EXPR_LOCATION (bind_expr);
1148 cleanup = NULL;
1149 stack_save = NULL;
1150 if (gimplify_ctxp->save_stack)
1152 gcall *stack_restore;
1154 /* Save stack on entry and restore it on exit. Add a try_finally
1155 block to achieve this. */
1156 build_stack_save_restore (&stack_save, &stack_restore);
1158 gimple_set_location (stack_save, start_locus);
1159 gimple_set_location (stack_restore, end_locus);
1161 gimplify_seq_add_stmt (&cleanup, stack_restore);
1164 /* Add clobbers for all variables that go out of scope. */
1165 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1167 if (TREE_CODE (t) == VAR_DECL
1168 && !is_global_var (t)
1169 && DECL_CONTEXT (t) == current_function_decl
1170 && !DECL_HARD_REGISTER (t)
1171 && !TREE_THIS_VOLATILE (t)
1172 && !DECL_HAS_VALUE_EXPR_P (t)
1173 /* Only care for variables that have to be in memory. Others
1174 will be rewritten into SSA names, hence moved to the top-level. */
1175 && !is_gimple_reg (t)
1176 && flag_stack_reuse != SR_NONE)
1178 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1179 gimple *clobber_stmt;
1180 TREE_THIS_VOLATILE (clobber) = 1;
1181 clobber_stmt = gimple_build_assign (t, clobber);
1182 gimple_set_location (clobber_stmt, end_locus);
1183 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1185 if (flag_openacc && oacc_declare_returns != NULL)
1187 tree *c = oacc_declare_returns->get (t);
1188 if (c != NULL)
1190 if (ret_clauses)
1191 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1193 ret_clauses = *c;
1195 oacc_declare_returns->remove (t);
1197 if (oacc_declare_returns->elements () == 0)
1199 delete oacc_declare_returns;
1200 oacc_declare_returns = NULL;
1207 if (ret_clauses)
1209 gomp_target *stmt;
1210 gimple_stmt_iterator si = gsi_start (cleanup);
1212 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1213 ret_clauses);
1214 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1217 if (cleanup)
1219 gtry *gs;
1220 gimple_seq new_body;
1222 new_body = NULL;
1223 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1224 GIMPLE_TRY_FINALLY);
1226 if (stack_save)
1227 gimplify_seq_add_stmt (&new_body, stack_save);
1228 gimplify_seq_add_stmt (&new_body, gs);
1229 gimple_bind_set_body (bind_stmt, new_body);
1232 gimplify_ctxp->save_stack = old_save_stack;
1233 gimple_pop_bind_expr ();
1235 gimplify_seq_add_stmt (pre_p, bind_stmt);
1237 if (temp)
1239 *expr_p = temp;
1240 return GS_OK;
1243 *expr_p = NULL_TREE;
1244 return GS_ALL_DONE;
1247 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1248 GIMPLE value, it is assigned to a new temporary and the statement is
1249 re-written to return the temporary.
1251 PRE_P points to the sequence where side effects that must happen before
1252 STMT should be stored. */
1254 static enum gimplify_status
1255 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1257 greturn *ret;
1258 tree ret_expr = TREE_OPERAND (stmt, 0);
1259 tree result_decl, result;
1261 if (ret_expr == error_mark_node)
1262 return GS_ERROR;
1264 /* Implicit _Cilk_sync must be inserted right before any return statement
1265 if there is a _Cilk_spawn in the function. If the user has provided a
1266 _Cilk_sync, the optimizer should remove this duplicate one. */
1267 if (fn_contains_cilk_spawn_p (cfun))
1269 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1270 gimplify_and_add (impl_sync, pre_p);
1273 if (!ret_expr
1274 || TREE_CODE (ret_expr) == RESULT_DECL
1275 || ret_expr == error_mark_node)
1277 greturn *ret = gimple_build_return (ret_expr);
1278 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1279 gimplify_seq_add_stmt (pre_p, ret);
1280 return GS_ALL_DONE;
1283 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1284 result_decl = NULL_TREE;
1285 else
1287 result_decl = TREE_OPERAND (ret_expr, 0);
1289 /* See through a return by reference. */
1290 if (TREE_CODE (result_decl) == INDIRECT_REF)
1291 result_decl = TREE_OPERAND (result_decl, 0);
1293 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1294 || TREE_CODE (ret_expr) == INIT_EXPR)
1295 && TREE_CODE (result_decl) == RESULT_DECL);
1298 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1299 Recall that aggregate_value_p is FALSE for any aggregate type that is
1300 returned in registers. If we're returning values in registers, then
1301 we don't want to extend the lifetime of the RESULT_DECL, particularly
1302 across another call. In addition, for those aggregates for which
1303 hard_function_value generates a PARALLEL, we'll die during normal
1304 expansion of structure assignments; there's special code in expand_return
1305 to handle this case that does not exist in expand_expr. */
1306 if (!result_decl)
1307 result = NULL_TREE;
1308 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1310 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1312 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1313 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1314 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1315 should be effectively allocated by the caller, i.e. all calls to
1316 this function must be subject to the Return Slot Optimization. */
1317 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1318 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1320 result = result_decl;
1322 else if (gimplify_ctxp->return_temp)
1323 result = gimplify_ctxp->return_temp;
1324 else
1326 result = create_tmp_reg (TREE_TYPE (result_decl));
1328 /* ??? With complex control flow (usually involving abnormal edges),
1329 we can wind up warning about an uninitialized value for this. Due
1330 to how this variable is constructed and initialized, this is never
1331 true. Give up and never warn. */
1332 TREE_NO_WARNING (result) = 1;
1334 gimplify_ctxp->return_temp = result;
1337 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1338 Then gimplify the whole thing. */
1339 if (result != result_decl)
1340 TREE_OPERAND (ret_expr, 0) = result;
1342 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1344 ret = gimple_build_return (result);
1345 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1346 gimplify_seq_add_stmt (pre_p, ret);
1348 return GS_ALL_DONE;
1351 /* Gimplify a variable-length array DECL. */
1353 static void
1354 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1356 /* This is a variable-sized decl. Simplify its size and mark it
1357 for deferred expansion. */
1358 tree t, addr, ptr_type;
1360 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1361 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1363 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1364 if (DECL_HAS_VALUE_EXPR_P (decl))
1365 return;
1367 /* All occurrences of this decl in final gimplified code will be
1368 replaced by indirection. Setting DECL_VALUE_EXPR does two
1369 things: First, it lets the rest of the gimplifier know what
1370 replacement to use. Second, it lets the debug info know
1371 where to find the value. */
1372 ptr_type = build_pointer_type (TREE_TYPE (decl));
1373 addr = create_tmp_var (ptr_type, get_name (decl));
1374 DECL_IGNORED_P (addr) = 0;
1375 t = build_fold_indirect_ref (addr);
1376 TREE_THIS_NOTRAP (t) = 1;
1377 SET_DECL_VALUE_EXPR (decl, t);
1378 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1380 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1381 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1382 size_int (DECL_ALIGN (decl)));
1383 /* The call has been built for a variable-sized object. */
1384 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1385 t = fold_convert (ptr_type, t);
1386 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1388 gimplify_and_add (t, seq_p);
1390 /* Indicate that we need to restore the stack level when the
1391 enclosing BIND_EXPR is exited. */
1392 gimplify_ctxp->save_stack = true;
1395 /* A helper function to be called via walk_tree. Mark all labels under *TP
1396 as being forced. To be called for DECL_INITIAL of static variables. */
1398 static tree
1399 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1401 if (TYPE_P (*tp))
1402 *walk_subtrees = 0;
1403 if (TREE_CODE (*tp) == LABEL_DECL)
1404 FORCED_LABEL (*tp) = 1;
1406 return NULL_TREE;
1409 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1410 and initialization explicit. */
1412 static enum gimplify_status
1413 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1415 tree stmt = *stmt_p;
1416 tree decl = DECL_EXPR_DECL (stmt);
1418 *stmt_p = NULL_TREE;
1420 if (TREE_TYPE (decl) == error_mark_node)
1421 return GS_ERROR;
1423 if ((TREE_CODE (decl) == TYPE_DECL
1424 || TREE_CODE (decl) == VAR_DECL)
1425 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1426 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1428 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1429 in case its size expressions contain problematic nodes like CALL_EXPR. */
1430 if (TREE_CODE (decl) == TYPE_DECL
1431 && DECL_ORIGINAL_TYPE (decl)
1432 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1433 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1435 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1437 tree init = DECL_INITIAL (decl);
1439 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1440 || (!TREE_STATIC (decl)
1441 && flag_stack_check == GENERIC_STACK_CHECK
1442 && compare_tree_int (DECL_SIZE_UNIT (decl),
1443 STACK_CHECK_MAX_VAR_SIZE) > 0))
1444 gimplify_vla_decl (decl, seq_p);
1446 /* Some front ends do not explicitly declare all anonymous
1447 artificial variables. We compensate here by declaring the
1448 variables, though it would be better if the front ends would
1449 explicitly declare them. */
1450 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1451 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1452 gimple_add_tmp_var (decl);
1454 if (init && init != error_mark_node)
1456 if (!TREE_STATIC (decl))
1458 DECL_INITIAL (decl) = NULL_TREE;
1459 init = build2 (INIT_EXPR, void_type_node, decl, init);
1460 gimplify_and_add (init, seq_p);
1461 ggc_free (init);
1463 else
1464 /* We must still examine initializers for static variables
1465 as they may contain a label address. */
1466 walk_tree (&init, force_labels_r, NULL, NULL);
1470 return GS_ALL_DONE;
1473 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1474 and replacing the LOOP_EXPR with goto, but if the loop contains an
1475 EXIT_EXPR, we need to append a label for it to jump to. */
1477 static enum gimplify_status
1478 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1480 tree saved_label = gimplify_ctxp->exit_label;
1481 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1483 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1485 gimplify_ctxp->exit_label = NULL_TREE;
1487 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1489 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1491 if (gimplify_ctxp->exit_label)
1492 gimplify_seq_add_stmt (pre_p,
1493 gimple_build_label (gimplify_ctxp->exit_label));
1495 gimplify_ctxp->exit_label = saved_label;
1497 *expr_p = NULL;
1498 return GS_ALL_DONE;
1501 /* Gimplify a statement list onto a sequence. These may be created either
1502 by an enlightened front-end, or by shortcut_cond_expr. */
1504 static enum gimplify_status
1505 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1507 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1509 tree_stmt_iterator i = tsi_start (*expr_p);
1511 while (!tsi_end_p (i))
1513 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1514 tsi_delink (&i);
1517 if (temp)
1519 *expr_p = temp;
1520 return GS_OK;
1523 return GS_ALL_DONE;
1527 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1528 branch to. */
1530 static enum gimplify_status
1531 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1533 tree switch_expr = *expr_p;
1534 gimple_seq switch_body_seq = NULL;
1535 enum gimplify_status ret;
1536 tree index_type = TREE_TYPE (switch_expr);
1537 if (index_type == NULL_TREE)
1538 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
1540 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1541 fb_rvalue);
1542 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1543 return ret;
1545 if (SWITCH_BODY (switch_expr))
1547 vec<tree> labels;
1548 vec<tree> saved_labels;
1549 tree default_case = NULL_TREE;
1550 gswitch *switch_stmt;
1552 /* If someone can be bothered to fill in the labels, they can
1553 be bothered to null out the body too. */
1554 gcc_assert (!SWITCH_LABELS (switch_expr));
1556 /* Save old labels, get new ones from body, then restore the old
1557 labels. Save all the things from the switch body to append after. */
1558 saved_labels = gimplify_ctxp->case_labels;
1559 gimplify_ctxp->case_labels.create (8);
1561 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1562 labels = gimplify_ctxp->case_labels;
1563 gimplify_ctxp->case_labels = saved_labels;
1565 preprocess_case_label_vec_for_gimple (labels, index_type,
1566 &default_case);
1568 if (!default_case)
1570 glabel *new_default;
1572 default_case
1573 = build_case_label (NULL_TREE, NULL_TREE,
1574 create_artificial_label (UNKNOWN_LOCATION));
1575 new_default = gimple_build_label (CASE_LABEL (default_case));
1576 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1579 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
1580 default_case, labels);
1581 gimplify_seq_add_stmt (pre_p, switch_stmt);
1582 gimplify_seq_add_seq (pre_p, switch_body_seq);
1583 labels.release ();
1585 else
1586 gcc_assert (SWITCH_LABELS (switch_expr));
1588 return GS_ALL_DONE;
1591 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
1593 static enum gimplify_status
1594 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1596 struct gimplify_ctx *ctxp;
1597 glabel *label_stmt;
1599 /* Invalid programs can play Duff's Device type games with, for example,
1600 #pragma omp parallel. At least in the C front end, we don't
1601 detect such invalid branches until after gimplification, in the
1602 diagnose_omp_blocks pass. */
1603 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1604 if (ctxp->case_labels.exists ())
1605 break;
1607 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
1608 ctxp->case_labels.safe_push (*expr_p);
1609 gimplify_seq_add_stmt (pre_p, label_stmt);
1611 return GS_ALL_DONE;
1614 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1615 if necessary. */
1617 tree
1618 build_and_jump (tree *label_p)
1620 if (label_p == NULL)
1621 /* If there's nowhere to jump, just fall through. */
1622 return NULL_TREE;
1624 if (*label_p == NULL_TREE)
1626 tree label = create_artificial_label (UNKNOWN_LOCATION);
1627 *label_p = label;
1630 return build1 (GOTO_EXPR, void_type_node, *label_p);
1633 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1634 This also involves building a label to jump to and communicating it to
1635 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1637 static enum gimplify_status
1638 gimplify_exit_expr (tree *expr_p)
1640 tree cond = TREE_OPERAND (*expr_p, 0);
1641 tree expr;
1643 expr = build_and_jump (&gimplify_ctxp->exit_label);
1644 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1645 *expr_p = expr;
1647 return GS_OK;
1650 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1651 different from its canonical type, wrap the whole thing inside a
1652 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1653 type.
1655 The canonical type of a COMPONENT_REF is the type of the field being
1656 referenced--unless the field is a bit-field which can be read directly
1657 in a smaller mode, in which case the canonical type is the
1658 sign-appropriate type corresponding to that mode. */
1660 static void
1661 canonicalize_component_ref (tree *expr_p)
1663 tree expr = *expr_p;
1664 tree type;
1666 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1668 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1669 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1670 else
1671 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1673 /* One could argue that all the stuff below is not necessary for
1674 the non-bitfield case and declare it a FE error if type
1675 adjustment would be needed. */
1676 if (TREE_TYPE (expr) != type)
1678 #ifdef ENABLE_TYPES_CHECKING
1679 tree old_type = TREE_TYPE (expr);
1680 #endif
1681 int type_quals;
1683 /* We need to preserve qualifiers and propagate them from
1684 operand 0. */
1685 type_quals = TYPE_QUALS (type)
1686 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1687 if (TYPE_QUALS (type) != type_quals)
1688 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1690 /* Set the type of the COMPONENT_REF to the underlying type. */
1691 TREE_TYPE (expr) = type;
1693 #ifdef ENABLE_TYPES_CHECKING
1694 /* It is now a FE error, if the conversion from the canonical
1695 type to the original expression type is not useless. */
1696 gcc_assert (useless_type_conversion_p (old_type, type));
1697 #endif
1701 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1702 to foo, embed that change in the ADDR_EXPR by converting
1703 T array[U];
1704 (T *)&array
1706 &array[L]
1707 where L is the lower bound. For simplicity, only do this for constant
1708 lower bound.
1709 The constraint is that the type of &array[L] is trivially convertible
1710 to T *. */
1712 static void
1713 canonicalize_addr_expr (tree *expr_p)
1715 tree expr = *expr_p;
1716 tree addr_expr = TREE_OPERAND (expr, 0);
1717 tree datype, ddatype, pddatype;
1719 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1720 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1721 || TREE_CODE (addr_expr) != ADDR_EXPR)
1722 return;
1724 /* The addr_expr type should be a pointer to an array. */
1725 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1726 if (TREE_CODE (datype) != ARRAY_TYPE)
1727 return;
1729 /* The pointer to element type shall be trivially convertible to
1730 the expression pointer type. */
1731 ddatype = TREE_TYPE (datype);
1732 pddatype = build_pointer_type (ddatype);
1733 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1734 pddatype))
1735 return;
1737 /* The lower bound and element sizes must be constant. */
1738 if (!TYPE_SIZE_UNIT (ddatype)
1739 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1740 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1741 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1742 return;
1744 /* All checks succeeded. Build a new node to merge the cast. */
1745 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1746 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1747 NULL_TREE, NULL_TREE);
1748 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1750 /* We can have stripped a required restrict qualifier above. */
1751 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1752 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1755 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1756 underneath as appropriate. */
1758 static enum gimplify_status
1759 gimplify_conversion (tree *expr_p)
1761 location_t loc = EXPR_LOCATION (*expr_p);
1762 gcc_assert (CONVERT_EXPR_P (*expr_p));
1764 /* Then strip away all but the outermost conversion. */
1765 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1767 /* And remove the outermost conversion if it's useless. */
1768 if (tree_ssa_useless_type_conversion (*expr_p))
1769 *expr_p = TREE_OPERAND (*expr_p, 0);
1771 /* If we still have a conversion at the toplevel,
1772 then canonicalize some constructs. */
1773 if (CONVERT_EXPR_P (*expr_p))
1775 tree sub = TREE_OPERAND (*expr_p, 0);
1777 /* If a NOP conversion is changing the type of a COMPONENT_REF
1778 expression, then canonicalize its type now in order to expose more
1779 redundant conversions. */
1780 if (TREE_CODE (sub) == COMPONENT_REF)
1781 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1783 /* If a NOP conversion is changing a pointer to array of foo
1784 to a pointer to foo, embed that change in the ADDR_EXPR. */
1785 else if (TREE_CODE (sub) == ADDR_EXPR)
1786 canonicalize_addr_expr (expr_p);
1789 /* If we have a conversion to a non-register type force the
1790 use of a VIEW_CONVERT_EXPR instead. */
1791 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1792 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1793 TREE_OPERAND (*expr_p, 0));
1795 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
1796 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
1797 TREE_SET_CODE (*expr_p, NOP_EXPR);
1799 return GS_OK;
1802 /* Nonlocal VLAs seen in the current function. */
1803 static hash_set<tree> *nonlocal_vlas;
1805 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
1806 static tree nonlocal_vla_vars;
1808 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
1809 DECL_VALUE_EXPR, and it's worth re-examining things. */
1811 static enum gimplify_status
1812 gimplify_var_or_parm_decl (tree *expr_p)
1814 tree decl = *expr_p;
1816 /* ??? If this is a local variable, and it has not been seen in any
1817 outer BIND_EXPR, then it's probably the result of a duplicate
1818 declaration, for which we've already issued an error. It would
1819 be really nice if the front end wouldn't leak these at all.
1820 Currently the only known culprit is C++ destructors, as seen
1821 in g++.old-deja/g++.jason/binding.C. */
1822 if (TREE_CODE (decl) == VAR_DECL
1823 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1824 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1825 && decl_function_context (decl) == current_function_decl)
1827 gcc_assert (seen_error ());
1828 return GS_ERROR;
1831 /* When within an OMP context, notice uses of variables. */
1832 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1833 return GS_ALL_DONE;
1835 /* If the decl is an alias for another expression, substitute it now. */
1836 if (DECL_HAS_VALUE_EXPR_P (decl))
1838 tree value_expr = DECL_VALUE_EXPR (decl);
1840 /* For referenced nonlocal VLAs add a decl for debugging purposes
1841 to the current function. */
1842 if (TREE_CODE (decl) == VAR_DECL
1843 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1844 && nonlocal_vlas != NULL
1845 && TREE_CODE (value_expr) == INDIRECT_REF
1846 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1847 && decl_function_context (decl) != current_function_decl)
1849 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1850 while (ctx
1851 && (ctx->region_type == ORT_WORKSHARE
1852 || ctx->region_type == ORT_SIMD
1853 || ctx->region_type == ORT_ACC))
1854 ctx = ctx->outer_context;
1855 if (!ctx && !nonlocal_vlas->add (decl))
1857 tree copy = copy_node (decl);
1859 lang_hooks.dup_lang_specific_decl (copy);
1860 SET_DECL_RTL (copy, 0);
1861 TREE_USED (copy) = 1;
1862 DECL_CHAIN (copy) = nonlocal_vla_vars;
1863 nonlocal_vla_vars = copy;
1864 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1865 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1869 *expr_p = unshare_expr (value_expr);
1870 return GS_OK;
1873 return GS_ALL_DONE;
1876 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
1878 static void
1879 recalculate_side_effects (tree t)
1881 enum tree_code code = TREE_CODE (t);
1882 int len = TREE_OPERAND_LENGTH (t);
1883 int i;
1885 switch (TREE_CODE_CLASS (code))
1887 case tcc_expression:
1888 switch (code)
1890 case INIT_EXPR:
1891 case MODIFY_EXPR:
1892 case VA_ARG_EXPR:
1893 case PREDECREMENT_EXPR:
1894 case PREINCREMENT_EXPR:
1895 case POSTDECREMENT_EXPR:
1896 case POSTINCREMENT_EXPR:
1897 /* All of these have side-effects, no matter what their
1898 operands are. */
1899 return;
1901 default:
1902 break;
1904 /* Fall through. */
1906 case tcc_comparison: /* a comparison expression */
1907 case tcc_unary: /* a unary arithmetic expression */
1908 case tcc_binary: /* a binary arithmetic expression */
1909 case tcc_reference: /* a reference */
1910 case tcc_vl_exp: /* a function call */
1911 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
1912 for (i = 0; i < len; ++i)
1914 tree op = TREE_OPERAND (t, i);
1915 if (op && TREE_SIDE_EFFECTS (op))
1916 TREE_SIDE_EFFECTS (t) = 1;
1918 break;
1920 case tcc_constant:
1921 /* No side-effects. */
1922 return;
1924 default:
1925 gcc_unreachable ();
1929 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1930 node *EXPR_P.
1932 compound_lval
1933 : min_lval '[' val ']'
1934 | min_lval '.' ID
1935 | compound_lval '[' val ']'
1936 | compound_lval '.' ID
1938 This is not part of the original SIMPLE definition, which separates
1939 array and member references, but it seems reasonable to handle them
1940 together. Also, this way we don't run into problems with union
1941 aliasing; gcc requires that for accesses through a union to alias, the
1942 union reference must be explicit, which was not always the case when we
1943 were splitting up array and member refs.
1945 PRE_P points to the sequence where side effects that must happen before
1946 *EXPR_P should be stored.
1948 POST_P points to the sequence where side effects that must happen after
1949 *EXPR_P should be stored. */
1951 static enum gimplify_status
1952 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1953 fallback_t fallback)
1955 tree *p;
1956 enum gimplify_status ret = GS_ALL_DONE, tret;
1957 int i;
1958 location_t loc = EXPR_LOCATION (*expr_p);
1959 tree expr = *expr_p;
1961 /* Create a stack of the subexpressions so later we can walk them in
1962 order from inner to outer. */
1963 auto_vec<tree, 10> expr_stack;
1965 /* We can handle anything that get_inner_reference can deal with. */
1966 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1968 restart:
1969 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1970 if (TREE_CODE (*p) == INDIRECT_REF)
1971 *p = fold_indirect_ref_loc (loc, *p);
1973 if (handled_component_p (*p))
1975 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1976 additional COMPONENT_REFs. */
1977 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1978 && gimplify_var_or_parm_decl (p) == GS_OK)
1979 goto restart;
1980 else
1981 break;
1983 expr_stack.safe_push (*p);
1986 gcc_assert (expr_stack.length ());
1988 /* Now EXPR_STACK is a stack of pointers to all the refs we've
1989 walked through and P points to the innermost expression.
1991 Java requires that we elaborated nodes in source order. That
1992 means we must gimplify the inner expression followed by each of
1993 the indices, in order. But we can't gimplify the inner
1994 expression until we deal with any variable bounds, sizes, or
1995 positions in order to deal with PLACEHOLDER_EXPRs.
1997 So we do this in three steps. First we deal with the annotations
1998 for any variables in the components, then we gimplify the base,
1999 then we gimplify any indices, from left to right. */
2000 for (i = expr_stack.length () - 1; i >= 0; i--)
2002 tree t = expr_stack[i];
2004 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2006 /* Gimplify the low bound and element type size and put them into
2007 the ARRAY_REF. If these values are set, they have already been
2008 gimplified. */
2009 if (TREE_OPERAND (t, 2) == NULL_TREE)
2011 tree low = unshare_expr (array_ref_low_bound (t));
2012 if (!is_gimple_min_invariant (low))
2014 TREE_OPERAND (t, 2) = low;
2015 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2016 post_p, is_gimple_reg,
2017 fb_rvalue);
2018 ret = MIN (ret, tret);
2021 else
2023 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2024 is_gimple_reg, fb_rvalue);
2025 ret = MIN (ret, tret);
2028 if (TREE_OPERAND (t, 3) == NULL_TREE)
2030 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2031 tree elmt_size = unshare_expr (array_ref_element_size (t));
2032 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2034 /* Divide the element size by the alignment of the element
2035 type (above). */
2036 elmt_size
2037 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2039 if (!is_gimple_min_invariant (elmt_size))
2041 TREE_OPERAND (t, 3) = elmt_size;
2042 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2043 post_p, is_gimple_reg,
2044 fb_rvalue);
2045 ret = MIN (ret, tret);
2048 else
2050 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2051 is_gimple_reg, fb_rvalue);
2052 ret = MIN (ret, tret);
2055 else if (TREE_CODE (t) == COMPONENT_REF)
2057 /* Set the field offset into T and gimplify it. */
2058 if (TREE_OPERAND (t, 2) == NULL_TREE)
2060 tree offset = unshare_expr (component_ref_field_offset (t));
2061 tree field = TREE_OPERAND (t, 1);
2062 tree factor
2063 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2065 /* Divide the offset by its alignment. */
2066 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2068 if (!is_gimple_min_invariant (offset))
2070 TREE_OPERAND (t, 2) = offset;
2071 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2072 post_p, is_gimple_reg,
2073 fb_rvalue);
2074 ret = MIN (ret, tret);
2077 else
2079 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2080 is_gimple_reg, fb_rvalue);
2081 ret = MIN (ret, tret);
2086 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2087 so as to match the min_lval predicate. Failure to do so may result
2088 in the creation of large aggregate temporaries. */
2089 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2090 fallback | fb_lvalue);
2091 ret = MIN (ret, tret);
2093 /* And finally, the indices and operands of ARRAY_REF. During this
2094 loop we also remove any useless conversions. */
2095 for (; expr_stack.length () > 0; )
2097 tree t = expr_stack.pop ();
2099 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2101 /* Gimplify the dimension. */
2102 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2104 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2105 is_gimple_val, fb_rvalue);
2106 ret = MIN (ret, tret);
2110 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2112 /* The innermost expression P may have originally had
2113 TREE_SIDE_EFFECTS set which would have caused all the outer
2114 expressions in *EXPR_P leading to P to also have had
2115 TREE_SIDE_EFFECTS set. */
2116 recalculate_side_effects (t);
2119 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2120 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2122 canonicalize_component_ref (expr_p);
2125 expr_stack.release ();
2127 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2129 return ret;
2132 /* Gimplify the self modifying expression pointed to by EXPR_P
2133 (++, --, +=, -=).
2135 PRE_P points to the list where side effects that must happen before
2136 *EXPR_P should be stored.
2138 POST_P points to the list where side effects that must happen after
2139 *EXPR_P should be stored.
2141 WANT_VALUE is nonzero iff we want to use the value of this expression
2142 in another expression.
2144 ARITH_TYPE is the type the computation should be performed in. */
2146 enum gimplify_status
2147 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2148 bool want_value, tree arith_type)
2150 enum tree_code code;
2151 tree lhs, lvalue, rhs, t1;
2152 gimple_seq post = NULL, *orig_post_p = post_p;
2153 bool postfix;
2154 enum tree_code arith_code;
2155 enum gimplify_status ret;
2156 location_t loc = EXPR_LOCATION (*expr_p);
2158 code = TREE_CODE (*expr_p);
2160 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2161 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2163 /* Prefix or postfix? */
2164 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2165 /* Faster to treat as prefix if result is not used. */
2166 postfix = want_value;
2167 else
2168 postfix = false;
2170 /* For postfix, make sure the inner expression's post side effects
2171 are executed after side effects from this expression. */
2172 if (postfix)
2173 post_p = &post;
2175 /* Add or subtract? */
2176 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2177 arith_code = PLUS_EXPR;
2178 else
2179 arith_code = MINUS_EXPR;
2181 /* Gimplify the LHS into a GIMPLE lvalue. */
2182 lvalue = TREE_OPERAND (*expr_p, 0);
2183 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2184 if (ret == GS_ERROR)
2185 return ret;
2187 /* Extract the operands to the arithmetic operation. */
2188 lhs = lvalue;
2189 rhs = TREE_OPERAND (*expr_p, 1);
2191 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2192 that as the result value and in the postqueue operation. */
2193 if (postfix)
2195 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2196 if (ret == GS_ERROR)
2197 return ret;
2199 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2202 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2203 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2205 rhs = convert_to_ptrofftype_loc (loc, rhs);
2206 if (arith_code == MINUS_EXPR)
2207 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2208 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2210 else
2211 t1 = fold_convert (TREE_TYPE (*expr_p),
2212 fold_build2 (arith_code, arith_type,
2213 fold_convert (arith_type, lhs),
2214 fold_convert (arith_type, rhs)));
2216 if (postfix)
2218 gimplify_assign (lvalue, t1, pre_p);
2219 gimplify_seq_add_seq (orig_post_p, post);
2220 *expr_p = lhs;
2221 return GS_ALL_DONE;
2223 else
2225 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2226 return GS_OK;
2230 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2232 static void
2233 maybe_with_size_expr (tree *expr_p)
2235 tree expr = *expr_p;
2236 tree type = TREE_TYPE (expr);
2237 tree size;
2239 /* If we've already wrapped this or the type is error_mark_node, we can't do
2240 anything. */
2241 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2242 || type == error_mark_node)
2243 return;
2245 /* If the size isn't known or is a constant, we have nothing to do. */
2246 size = TYPE_SIZE_UNIT (type);
2247 if (!size || TREE_CODE (size) == INTEGER_CST)
2248 return;
2250 /* Otherwise, make a WITH_SIZE_EXPR. */
2251 size = unshare_expr (size);
2252 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2253 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2256 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2257 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2258 the CALL_EXPR. */
2260 enum gimplify_status
2261 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2263 bool (*test) (tree);
2264 fallback_t fb;
2266 /* In general, we allow lvalues for function arguments to avoid
2267 extra overhead of copying large aggregates out of even larger
2268 aggregates into temporaries only to copy the temporaries to
2269 the argument list. Make optimizers happy by pulling out to
2270 temporaries those types that fit in registers. */
2271 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2272 test = is_gimple_val, fb = fb_rvalue;
2273 else
2275 test = is_gimple_lvalue, fb = fb_either;
2276 /* Also strip a TARGET_EXPR that would force an extra copy. */
2277 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2279 tree init = TARGET_EXPR_INITIAL (*arg_p);
2280 if (init
2281 && !VOID_TYPE_P (TREE_TYPE (init)))
2282 *arg_p = init;
2286 /* If this is a variable sized type, we must remember the size. */
2287 maybe_with_size_expr (arg_p);
2289 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2290 /* Make sure arguments have the same location as the function call
2291 itself. */
2292 protected_set_expr_location (*arg_p, call_location);
2294 /* There is a sequence point before a function call. Side effects in
2295 the argument list must occur before the actual call. So, when
2296 gimplifying arguments, force gimplify_expr to use an internal
2297 post queue which is then appended to the end of PRE_P. */
2298 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2301 /* Don't fold inside offloading or taskreg regions: it can break code by
2302 adding decl references that weren't in the source. We'll do it during
2303 omplower pass instead. */
2305 static bool
2306 maybe_fold_stmt (gimple_stmt_iterator *gsi)
2308 struct gimplify_omp_ctx *ctx;
2309 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2310 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
2311 return false;
2312 return fold_stmt (gsi);
2315 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2316 WANT_VALUE is true if the result of the call is desired. */
2318 static enum gimplify_status
2319 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2321 tree fndecl, parms, p, fnptrtype;
2322 enum gimplify_status ret;
2323 int i, nargs;
2324 gcall *call;
2325 bool builtin_va_start_p = false;
2326 location_t loc = EXPR_LOCATION (*expr_p);
2328 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2330 /* For reliable diagnostics during inlining, it is necessary that
2331 every call_expr be annotated with file and line. */
2332 if (! EXPR_HAS_LOCATION (*expr_p))
2333 SET_EXPR_LOCATION (*expr_p, input_location);
2335 /* Gimplify internal functions created in the FEs. */
2336 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
2338 if (want_value)
2339 return GS_ALL_DONE;
2341 nargs = call_expr_nargs (*expr_p);
2342 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
2343 auto_vec<tree> vargs (nargs);
2345 for (i = 0; i < nargs; i++)
2347 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2348 EXPR_LOCATION (*expr_p));
2349 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
2351 gimple *call = gimple_build_call_internal_vec (ifn, vargs);
2352 gimplify_seq_add_stmt (pre_p, call);
2353 return GS_ALL_DONE;
2356 /* This may be a call to a builtin function.
2358 Builtin function calls may be transformed into different
2359 (and more efficient) builtin function calls under certain
2360 circumstances. Unfortunately, gimplification can muck things
2361 up enough that the builtin expanders are not aware that certain
2362 transformations are still valid.
2364 So we attempt transformation/gimplification of the call before
2365 we gimplify the CALL_EXPR. At this time we do not manage to
2366 transform all calls in the same manner as the expanders do, but
2367 we do transform most of them. */
2368 fndecl = get_callee_fndecl (*expr_p);
2369 if (fndecl
2370 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2371 switch (DECL_FUNCTION_CODE (fndecl))
2373 case BUILT_IN_VA_START:
2375 builtin_va_start_p = TRUE;
2376 if (call_expr_nargs (*expr_p) < 2)
2378 error ("too few arguments to function %<va_start%>");
2379 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2380 return GS_OK;
2383 if (fold_builtin_next_arg (*expr_p, true))
2385 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2386 return GS_OK;
2388 break;
2390 case BUILT_IN_LINE:
2392 *expr_p = build_int_cst (TREE_TYPE (*expr_p),
2393 LOCATION_LINE (EXPR_LOCATION (*expr_p)));
2394 return GS_OK;
2396 case BUILT_IN_FILE:
2398 const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p));
2399 *expr_p = build_string_literal (strlen (locfile) + 1, locfile);
2400 return GS_OK;
2402 case BUILT_IN_FUNCTION:
2404 const char *function;
2405 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2406 *expr_p = build_string_literal (strlen (function) + 1, function);
2407 return GS_OK;
2409 default:
2412 if (fndecl && DECL_BUILT_IN (fndecl))
2414 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2415 if (new_tree && new_tree != *expr_p)
2417 /* There was a transformation of this call which computes the
2418 same value, but in a more efficient way. Return and try
2419 again. */
2420 *expr_p = new_tree;
2421 return GS_OK;
2425 /* Remember the original function pointer type. */
2426 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2428 /* There is a sequence point before the call, so any side effects in
2429 the calling expression must occur before the actual call. Force
2430 gimplify_expr to use an internal post queue. */
2431 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2432 is_gimple_call_addr, fb_rvalue);
2434 nargs = call_expr_nargs (*expr_p);
2436 /* Get argument types for verification. */
2437 fndecl = get_callee_fndecl (*expr_p);
2438 parms = NULL_TREE;
2439 if (fndecl)
2440 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2441 else
2442 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
2444 if (fndecl && DECL_ARGUMENTS (fndecl))
2445 p = DECL_ARGUMENTS (fndecl);
2446 else if (parms)
2447 p = parms;
2448 else
2449 p = NULL_TREE;
2450 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2453 /* If the last argument is __builtin_va_arg_pack () and it is not
2454 passed as a named argument, decrease the number of CALL_EXPR
2455 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2456 if (!p
2457 && i < nargs
2458 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2460 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2461 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2463 if (last_arg_fndecl
2464 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2465 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2466 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2468 tree call = *expr_p;
2470 --nargs;
2471 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2472 CALL_EXPR_FN (call),
2473 nargs, CALL_EXPR_ARGP (call));
2475 /* Copy all CALL_EXPR flags, location and block, except
2476 CALL_EXPR_VA_ARG_PACK flag. */
2477 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2478 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2479 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2480 = CALL_EXPR_RETURN_SLOT_OPT (call);
2481 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2482 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2484 /* Set CALL_EXPR_VA_ARG_PACK. */
2485 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2489 /* Gimplify the function arguments. */
2490 if (nargs > 0)
2492 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2493 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2494 PUSH_ARGS_REVERSED ? i-- : i++)
2496 enum gimplify_status t;
2498 /* Avoid gimplifying the second argument to va_start, which needs to
2499 be the plain PARM_DECL. */
2500 if ((i != 1) || !builtin_va_start_p)
2502 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2503 EXPR_LOCATION (*expr_p));
2505 if (t == GS_ERROR)
2506 ret = GS_ERROR;
2511 /* Gimplify the static chain. */
2512 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
2514 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
2515 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
2516 else
2518 enum gimplify_status t;
2519 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
2520 EXPR_LOCATION (*expr_p));
2521 if (t == GS_ERROR)
2522 ret = GS_ERROR;
2526 /* Verify the function result. */
2527 if (want_value && fndecl
2528 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2530 error_at (loc, "using result of function returning %<void%>");
2531 ret = GS_ERROR;
2534 /* Try this again in case gimplification exposed something. */
2535 if (ret != GS_ERROR)
2537 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2539 if (new_tree && new_tree != *expr_p)
2541 /* There was a transformation of this call which computes the
2542 same value, but in a more efficient way. Return and try
2543 again. */
2544 *expr_p = new_tree;
2545 return GS_OK;
2548 else
2550 *expr_p = error_mark_node;
2551 return GS_ERROR;
2554 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2555 decl. This allows us to eliminate redundant or useless
2556 calls to "const" functions. */
2557 if (TREE_CODE (*expr_p) == CALL_EXPR)
2559 int flags = call_expr_flags (*expr_p);
2560 if (flags & (ECF_CONST | ECF_PURE)
2561 /* An infinite loop is considered a side effect. */
2562 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2563 TREE_SIDE_EFFECTS (*expr_p) = 0;
2566 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2567 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2568 form and delegate the creation of a GIMPLE_CALL to
2569 gimplify_modify_expr. This is always possible because when
2570 WANT_VALUE is true, the caller wants the result of this call into
2571 a temporary, which means that we will emit an INIT_EXPR in
2572 internal_get_tmp_var which will then be handled by
2573 gimplify_modify_expr. */
2574 if (!want_value)
2576 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2577 have to do is replicate it as a GIMPLE_CALL tuple. */
2578 gimple_stmt_iterator gsi;
2579 call = gimple_build_call_from_tree (*expr_p);
2580 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2581 notice_special_calls (call);
2582 gimplify_seq_add_stmt (pre_p, call);
2583 gsi = gsi_last (*pre_p);
2584 maybe_fold_stmt (&gsi);
2585 *expr_p = NULL_TREE;
2587 else
2588 /* Remember the original function type. */
2589 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2590 CALL_EXPR_FN (*expr_p));
2592 return ret;
2595 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2596 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2598 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2599 condition is true or false, respectively. If null, we should generate
2600 our own to skip over the evaluation of this specific expression.
2602 LOCUS is the source location of the COND_EXPR.
2604 This function is the tree equivalent of do_jump.
2606 shortcut_cond_r should only be called by shortcut_cond_expr. */
2608 static tree
2609 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2610 location_t locus)
2612 tree local_label = NULL_TREE;
2613 tree t, expr = NULL;
2615 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2616 retain the shortcut semantics. Just insert the gotos here;
2617 shortcut_cond_expr will append the real blocks later. */
2618 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2620 location_t new_locus;
2622 /* Turn if (a && b) into
2624 if (a); else goto no;
2625 if (b) goto yes; else goto no;
2626 (no:) */
2628 if (false_label_p == NULL)
2629 false_label_p = &local_label;
2631 /* Keep the original source location on the first 'if'. */
2632 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2633 append_to_statement_list (t, &expr);
2635 /* Set the source location of the && on the second 'if'. */
2636 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2637 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2638 new_locus);
2639 append_to_statement_list (t, &expr);
2641 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2643 location_t new_locus;
2645 /* Turn if (a || b) into
2647 if (a) goto yes;
2648 if (b) goto yes; else goto no;
2649 (yes:) */
2651 if (true_label_p == NULL)
2652 true_label_p = &local_label;
2654 /* Keep the original source location on the first 'if'. */
2655 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2656 append_to_statement_list (t, &expr);
2658 /* Set the source location of the || on the second 'if'. */
2659 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2660 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2661 new_locus);
2662 append_to_statement_list (t, &expr);
2664 else if (TREE_CODE (pred) == COND_EXPR
2665 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2666 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2668 location_t new_locus;
2670 /* As long as we're messing with gotos, turn if (a ? b : c) into
2671 if (a)
2672 if (b) goto yes; else goto no;
2673 else
2674 if (c) goto yes; else goto no;
2676 Don't do this if one of the arms has void type, which can happen
2677 in C++ when the arm is throw. */
2679 /* Keep the original source location on the first 'if'. Set the source
2680 location of the ? on the second 'if'. */
2681 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2682 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2683 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2684 false_label_p, locus),
2685 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2686 false_label_p, new_locus));
2688 else
2690 expr = build3 (COND_EXPR, void_type_node, pred,
2691 build_and_jump (true_label_p),
2692 build_and_jump (false_label_p));
2693 SET_EXPR_LOCATION (expr, locus);
2696 if (local_label)
2698 t = build1 (LABEL_EXPR, void_type_node, local_label);
2699 append_to_statement_list (t, &expr);
2702 return expr;
2705 /* Given a conditional expression EXPR with short-circuit boolean
2706 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2707 predicate apart into the equivalent sequence of conditionals. */
2709 static tree
2710 shortcut_cond_expr (tree expr)
2712 tree pred = TREE_OPERAND (expr, 0);
2713 tree then_ = TREE_OPERAND (expr, 1);
2714 tree else_ = TREE_OPERAND (expr, 2);
2715 tree true_label, false_label, end_label, t;
2716 tree *true_label_p;
2717 tree *false_label_p;
2718 bool emit_end, emit_false, jump_over_else;
2719 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2720 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2722 /* First do simple transformations. */
2723 if (!else_se)
2725 /* If there is no 'else', turn
2726 if (a && b) then c
2727 into
2728 if (a) if (b) then c. */
2729 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2731 /* Keep the original source location on the first 'if'. */
2732 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2733 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2734 /* Set the source location of the && on the second 'if'. */
2735 if (EXPR_HAS_LOCATION (pred))
2736 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2737 then_ = shortcut_cond_expr (expr);
2738 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2739 pred = TREE_OPERAND (pred, 0);
2740 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2741 SET_EXPR_LOCATION (expr, locus);
2745 if (!then_se)
2747 /* If there is no 'then', turn
2748 if (a || b); else d
2749 into
2750 if (a); else if (b); else d. */
2751 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2753 /* Keep the original source location on the first 'if'. */
2754 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2755 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2756 /* Set the source location of the || on the second 'if'. */
2757 if (EXPR_HAS_LOCATION (pred))
2758 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2759 else_ = shortcut_cond_expr (expr);
2760 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2761 pred = TREE_OPERAND (pred, 0);
2762 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2763 SET_EXPR_LOCATION (expr, locus);
2767 /* If we're done, great. */
2768 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2769 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2770 return expr;
2772 /* Otherwise we need to mess with gotos. Change
2773 if (a) c; else d;
2775 if (a); else goto no;
2776 c; goto end;
2777 no: d; end:
2778 and recursively gimplify the condition. */
2780 true_label = false_label = end_label = NULL_TREE;
2782 /* If our arms just jump somewhere, hijack those labels so we don't
2783 generate jumps to jumps. */
2785 if (then_
2786 && TREE_CODE (then_) == GOTO_EXPR
2787 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2789 true_label = GOTO_DESTINATION (then_);
2790 then_ = NULL;
2791 then_se = false;
2794 if (else_
2795 && TREE_CODE (else_) == GOTO_EXPR
2796 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2798 false_label = GOTO_DESTINATION (else_);
2799 else_ = NULL;
2800 else_se = false;
2803 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2804 if (true_label)
2805 true_label_p = &true_label;
2806 else
2807 true_label_p = NULL;
2809 /* The 'else' branch also needs a label if it contains interesting code. */
2810 if (false_label || else_se)
2811 false_label_p = &false_label;
2812 else
2813 false_label_p = NULL;
2815 /* If there was nothing else in our arms, just forward the label(s). */
2816 if (!then_se && !else_se)
2817 return shortcut_cond_r (pred, true_label_p, false_label_p,
2818 EXPR_LOC_OR_LOC (expr, input_location));
2820 /* If our last subexpression already has a terminal label, reuse it. */
2821 if (else_se)
2822 t = expr_last (else_);
2823 else if (then_se)
2824 t = expr_last (then_);
2825 else
2826 t = NULL;
2827 if (t && TREE_CODE (t) == LABEL_EXPR)
2828 end_label = LABEL_EXPR_LABEL (t);
2830 /* If we don't care about jumping to the 'else' branch, jump to the end
2831 if the condition is false. */
2832 if (!false_label_p)
2833 false_label_p = &end_label;
2835 /* We only want to emit these labels if we aren't hijacking them. */
2836 emit_end = (end_label == NULL_TREE);
2837 emit_false = (false_label == NULL_TREE);
2839 /* We only emit the jump over the else clause if we have to--if the
2840 then clause may fall through. Otherwise we can wind up with a
2841 useless jump and a useless label at the end of gimplified code,
2842 which will cause us to think that this conditional as a whole
2843 falls through even if it doesn't. If we then inline a function
2844 which ends with such a condition, that can cause us to issue an
2845 inappropriate warning about control reaching the end of a
2846 non-void function. */
2847 jump_over_else = block_may_fallthru (then_);
2849 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2850 EXPR_LOC_OR_LOC (expr, input_location));
2852 expr = NULL;
2853 append_to_statement_list (pred, &expr);
2855 append_to_statement_list (then_, &expr);
2856 if (else_se)
2858 if (jump_over_else)
2860 tree last = expr_last (expr);
2861 t = build_and_jump (&end_label);
2862 if (EXPR_HAS_LOCATION (last))
2863 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2864 append_to_statement_list (t, &expr);
2866 if (emit_false)
2868 t = build1 (LABEL_EXPR, void_type_node, false_label);
2869 append_to_statement_list (t, &expr);
2871 append_to_statement_list (else_, &expr);
2873 if (emit_end && end_label)
2875 t = build1 (LABEL_EXPR, void_type_node, end_label);
2876 append_to_statement_list (t, &expr);
2879 return expr;
2882 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2884 tree
2885 gimple_boolify (tree expr)
2887 tree type = TREE_TYPE (expr);
2888 location_t loc = EXPR_LOCATION (expr);
2890 if (TREE_CODE (expr) == NE_EXPR
2891 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2892 && integer_zerop (TREE_OPERAND (expr, 1)))
2894 tree call = TREE_OPERAND (expr, 0);
2895 tree fn = get_callee_fndecl (call);
2897 /* For __builtin_expect ((long) (x), y) recurse into x as well
2898 if x is truth_value_p. */
2899 if (fn
2900 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2901 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2902 && call_expr_nargs (call) == 2)
2904 tree arg = CALL_EXPR_ARG (call, 0);
2905 if (arg)
2907 if (TREE_CODE (arg) == NOP_EXPR
2908 && TREE_TYPE (arg) == TREE_TYPE (call))
2909 arg = TREE_OPERAND (arg, 0);
2910 if (truth_value_p (TREE_CODE (arg)))
2912 arg = gimple_boolify (arg);
2913 CALL_EXPR_ARG (call, 0)
2914 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2920 switch (TREE_CODE (expr))
2922 case TRUTH_AND_EXPR:
2923 case TRUTH_OR_EXPR:
2924 case TRUTH_XOR_EXPR:
2925 case TRUTH_ANDIF_EXPR:
2926 case TRUTH_ORIF_EXPR:
2927 /* Also boolify the arguments of truth exprs. */
2928 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2929 /* FALLTHRU */
2931 case TRUTH_NOT_EXPR:
2932 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2934 /* These expressions always produce boolean results. */
2935 if (TREE_CODE (type) != BOOLEAN_TYPE)
2936 TREE_TYPE (expr) = boolean_type_node;
2937 return expr;
2939 case ANNOTATE_EXPR:
2940 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
2942 case annot_expr_ivdep_kind:
2943 case annot_expr_no_vector_kind:
2944 case annot_expr_vector_kind:
2945 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2946 if (TREE_CODE (type) != BOOLEAN_TYPE)
2947 TREE_TYPE (expr) = boolean_type_node;
2948 return expr;
2949 default:
2950 gcc_unreachable ();
2953 default:
2954 if (COMPARISON_CLASS_P (expr))
2956 /* There expressions always prduce boolean results. */
2957 if (TREE_CODE (type) != BOOLEAN_TYPE)
2958 TREE_TYPE (expr) = boolean_type_node;
2959 return expr;
2961 /* Other expressions that get here must have boolean values, but
2962 might need to be converted to the appropriate mode. */
2963 if (TREE_CODE (type) == BOOLEAN_TYPE)
2964 return expr;
2965 return fold_convert_loc (loc, boolean_type_node, expr);
2969 /* Given a conditional expression *EXPR_P without side effects, gimplify
2970 its operands. New statements are inserted to PRE_P. */
2972 static enum gimplify_status
2973 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2975 tree expr = *expr_p, cond;
2976 enum gimplify_status ret, tret;
2977 enum tree_code code;
2979 cond = gimple_boolify (COND_EXPR_COND (expr));
2981 /* We need to handle && and || specially, as their gimplification
2982 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
2983 code = TREE_CODE (cond);
2984 if (code == TRUTH_ANDIF_EXPR)
2985 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2986 else if (code == TRUTH_ORIF_EXPR)
2987 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2988 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2989 COND_EXPR_COND (*expr_p) = cond;
2991 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2992 is_gimple_val, fb_rvalue);
2993 ret = MIN (ret, tret);
2994 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2995 is_gimple_val, fb_rvalue);
2997 return MIN (ret, tret);
3000 /* Return true if evaluating EXPR could trap.
3001 EXPR is GENERIC, while tree_could_trap_p can be called
3002 only on GIMPLE. */
3004 static bool
3005 generic_expr_could_trap_p (tree expr)
3007 unsigned i, n;
3009 if (!expr || is_gimple_val (expr))
3010 return false;
3012 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3013 return true;
3015 n = TREE_OPERAND_LENGTH (expr);
3016 for (i = 0; i < n; i++)
3017 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3018 return true;
3020 return false;
3023 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3024 into
3026 if (p) if (p)
3027 t1 = a; a;
3028 else or else
3029 t1 = b; b;
3032 The second form is used when *EXPR_P is of type void.
3034 PRE_P points to the list where side effects that must happen before
3035 *EXPR_P should be stored. */
3037 static enum gimplify_status
3038 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3040 tree expr = *expr_p;
3041 tree type = TREE_TYPE (expr);
3042 location_t loc = EXPR_LOCATION (expr);
3043 tree tmp, arm1, arm2;
3044 enum gimplify_status ret;
3045 tree label_true, label_false, label_cont;
3046 bool have_then_clause_p, have_else_clause_p;
3047 gcond *cond_stmt;
3048 enum tree_code pred_code;
3049 gimple_seq seq = NULL;
3051 /* If this COND_EXPR has a value, copy the values into a temporary within
3052 the arms. */
3053 if (!VOID_TYPE_P (type))
3055 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3056 tree result;
3058 /* If either an rvalue is ok or we do not require an lvalue, create the
3059 temporary. But we cannot do that if the type is addressable. */
3060 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3061 && !TREE_ADDRESSABLE (type))
3063 if (gimplify_ctxp->allow_rhs_cond_expr
3064 /* If either branch has side effects or could trap, it can't be
3065 evaluated unconditionally. */
3066 && !TREE_SIDE_EFFECTS (then_)
3067 && !generic_expr_could_trap_p (then_)
3068 && !TREE_SIDE_EFFECTS (else_)
3069 && !generic_expr_could_trap_p (else_))
3070 return gimplify_pure_cond_expr (expr_p, pre_p);
3072 tmp = create_tmp_var (type, "iftmp");
3073 result = tmp;
3076 /* Otherwise, only create and copy references to the values. */
3077 else
3079 type = build_pointer_type (type);
3081 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3082 then_ = build_fold_addr_expr_loc (loc, then_);
3084 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3085 else_ = build_fold_addr_expr_loc (loc, else_);
3087 expr
3088 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3090 tmp = create_tmp_var (type, "iftmp");
3091 result = build_simple_mem_ref_loc (loc, tmp);
3094 /* Build the new then clause, `tmp = then_;'. But don't build the
3095 assignment if the value is void; in C++ it can be if it's a throw. */
3096 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3097 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3099 /* Similarly, build the new else clause, `tmp = else_;'. */
3100 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3101 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3103 TREE_TYPE (expr) = void_type_node;
3104 recalculate_side_effects (expr);
3106 /* Move the COND_EXPR to the prequeue. */
3107 gimplify_stmt (&expr, pre_p);
3109 *expr_p = result;
3110 return GS_ALL_DONE;
3113 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3114 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3115 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3116 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3118 /* Make sure the condition has BOOLEAN_TYPE. */
3119 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3121 /* Break apart && and || conditions. */
3122 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3123 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3125 expr = shortcut_cond_expr (expr);
3127 if (expr != *expr_p)
3129 *expr_p = expr;
3131 /* We can't rely on gimplify_expr to re-gimplify the expanded
3132 form properly, as cleanups might cause the target labels to be
3133 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3134 set up a conditional context. */
3135 gimple_push_condition ();
3136 gimplify_stmt (expr_p, &seq);
3137 gimple_pop_condition (pre_p);
3138 gimple_seq_add_seq (pre_p, seq);
3140 return GS_ALL_DONE;
3144 /* Now do the normal gimplification. */
3146 /* Gimplify condition. */
3147 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3148 fb_rvalue);
3149 if (ret == GS_ERROR)
3150 return GS_ERROR;
3151 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3153 gimple_push_condition ();
3155 have_then_clause_p = have_else_clause_p = false;
3156 if (TREE_OPERAND (expr, 1) != NULL
3157 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3158 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3159 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3160 == current_function_decl)
3161 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3162 have different locations, otherwise we end up with incorrect
3163 location information on the branches. */
3164 && (optimize
3165 || !EXPR_HAS_LOCATION (expr)
3166 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3167 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3169 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3170 have_then_clause_p = true;
3172 else
3173 label_true = create_artificial_label (UNKNOWN_LOCATION);
3174 if (TREE_OPERAND (expr, 2) != NULL
3175 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3176 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3177 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3178 == current_function_decl)
3179 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3180 have different locations, otherwise we end up with incorrect
3181 location information on the branches. */
3182 && (optimize
3183 || !EXPR_HAS_LOCATION (expr)
3184 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3185 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3187 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3188 have_else_clause_p = true;
3190 else
3191 label_false = create_artificial_label (UNKNOWN_LOCATION);
3193 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3194 &arm2);
3195 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
3196 label_false);
3197 gimplify_seq_add_stmt (&seq, cond_stmt);
3198 gimple_stmt_iterator gsi = gsi_last (seq);
3199 maybe_fold_stmt (&gsi);
3201 label_cont = NULL_TREE;
3202 if (!have_then_clause_p)
3204 /* For if (...) {} else { code; } put label_true after
3205 the else block. */
3206 if (TREE_OPERAND (expr, 1) == NULL_TREE
3207 && !have_else_clause_p
3208 && TREE_OPERAND (expr, 2) != NULL_TREE)
3209 label_cont = label_true;
3210 else
3212 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3213 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3214 /* For if (...) { code; } else {} or
3215 if (...) { code; } else goto label; or
3216 if (...) { code; return; } else { ... }
3217 label_cont isn't needed. */
3218 if (!have_else_clause_p
3219 && TREE_OPERAND (expr, 2) != NULL_TREE
3220 && gimple_seq_may_fallthru (seq))
3222 gimple *g;
3223 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3225 g = gimple_build_goto (label_cont);
3227 /* GIMPLE_COND's are very low level; they have embedded
3228 gotos. This particular embedded goto should not be marked
3229 with the location of the original COND_EXPR, as it would
3230 correspond to the COND_EXPR's condition, not the ELSE or the
3231 THEN arms. To avoid marking it with the wrong location, flag
3232 it as "no location". */
3233 gimple_set_do_not_emit_location (g);
3235 gimplify_seq_add_stmt (&seq, g);
3239 if (!have_else_clause_p)
3241 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3242 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3244 if (label_cont)
3245 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3247 gimple_pop_condition (pre_p);
3248 gimple_seq_add_seq (pre_p, seq);
3250 if (ret == GS_ERROR)
3251 ; /* Do nothing. */
3252 else if (have_then_clause_p || have_else_clause_p)
3253 ret = GS_ALL_DONE;
3254 else
3256 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3257 expr = TREE_OPERAND (expr, 0);
3258 gimplify_stmt (&expr, pre_p);
3261 *expr_p = NULL;
3262 return ret;
3265 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3266 to be marked addressable.
3268 We cannot rely on such an expression being directly markable if a temporary
3269 has been created by the gimplification. In this case, we create another
3270 temporary and initialize it with a copy, which will become a store after we
3271 mark it addressable. This can happen if the front-end passed us something
3272 that it could not mark addressable yet, like a Fortran pass-by-reference
3273 parameter (int) floatvar. */
3275 static void
3276 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3278 while (handled_component_p (*expr_p))
3279 expr_p = &TREE_OPERAND (*expr_p, 0);
3280 if (is_gimple_reg (*expr_p))
3282 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3283 DECL_GIMPLE_REG_P (var) = 0;
3284 *expr_p = var;
3288 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3289 a call to __builtin_memcpy. */
3291 static enum gimplify_status
3292 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3293 gimple_seq *seq_p)
3295 tree t, to, to_ptr, from, from_ptr;
3296 gcall *gs;
3297 location_t loc = EXPR_LOCATION (*expr_p);
3299 to = TREE_OPERAND (*expr_p, 0);
3300 from = TREE_OPERAND (*expr_p, 1);
3302 /* Mark the RHS addressable. Beware that it may not be possible to do so
3303 directly if a temporary has been created by the gimplification. */
3304 prepare_gimple_addressable (&from, seq_p);
3306 mark_addressable (from);
3307 from_ptr = build_fold_addr_expr_loc (loc, from);
3308 gimplify_arg (&from_ptr, seq_p, loc);
3310 mark_addressable (to);
3311 to_ptr = build_fold_addr_expr_loc (loc, to);
3312 gimplify_arg (&to_ptr, seq_p, loc);
3314 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3316 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3318 if (want_value)
3320 /* tmp = memcpy() */
3321 t = create_tmp_var (TREE_TYPE (to_ptr));
3322 gimple_call_set_lhs (gs, t);
3323 gimplify_seq_add_stmt (seq_p, gs);
3325 *expr_p = build_simple_mem_ref (t);
3326 return GS_ALL_DONE;
3329 gimplify_seq_add_stmt (seq_p, gs);
3330 *expr_p = NULL;
3331 return GS_ALL_DONE;
3334 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3335 a call to __builtin_memset. In this case we know that the RHS is
3336 a CONSTRUCTOR with an empty element list. */
3338 static enum gimplify_status
3339 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3340 gimple_seq *seq_p)
3342 tree t, from, to, to_ptr;
3343 gcall *gs;
3344 location_t loc = EXPR_LOCATION (*expr_p);
3346 /* Assert our assumptions, to abort instead of producing wrong code
3347 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3348 not be immediately exposed. */
3349 from = TREE_OPERAND (*expr_p, 1);
3350 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3351 from = TREE_OPERAND (from, 0);
3353 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3354 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3356 /* Now proceed. */
3357 to = TREE_OPERAND (*expr_p, 0);
3359 to_ptr = build_fold_addr_expr_loc (loc, to);
3360 gimplify_arg (&to_ptr, seq_p, loc);
3361 t = builtin_decl_implicit (BUILT_IN_MEMSET);
3363 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3365 if (want_value)
3367 /* tmp = memset() */
3368 t = create_tmp_var (TREE_TYPE (to_ptr));
3369 gimple_call_set_lhs (gs, t);
3370 gimplify_seq_add_stmt (seq_p, gs);
3372 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3373 return GS_ALL_DONE;
3376 gimplify_seq_add_stmt (seq_p, gs);
3377 *expr_p = NULL;
3378 return GS_ALL_DONE;
3381 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3382 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3383 assignment. Return non-null if we detect a potential overlap. */
3385 struct gimplify_init_ctor_preeval_data
3387 /* The base decl of the lhs object. May be NULL, in which case we
3388 have to assume the lhs is indirect. */
3389 tree lhs_base_decl;
3391 /* The alias set of the lhs object. */
3392 alias_set_type lhs_alias_set;
3395 static tree
3396 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3398 struct gimplify_init_ctor_preeval_data *data
3399 = (struct gimplify_init_ctor_preeval_data *) xdata;
3400 tree t = *tp;
3402 /* If we find the base object, obviously we have overlap. */
3403 if (data->lhs_base_decl == t)
3404 return t;
3406 /* If the constructor component is indirect, determine if we have a
3407 potential overlap with the lhs. The only bits of information we
3408 have to go on at this point are addressability and alias sets. */
3409 if ((INDIRECT_REF_P (t)
3410 || TREE_CODE (t) == MEM_REF)
3411 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3412 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3413 return t;
3415 /* If the constructor component is a call, determine if it can hide a
3416 potential overlap with the lhs through an INDIRECT_REF like above.
3417 ??? Ugh - this is completely broken. In fact this whole analysis
3418 doesn't look conservative. */
3419 if (TREE_CODE (t) == CALL_EXPR)
3421 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3423 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3424 if (POINTER_TYPE_P (TREE_VALUE (type))
3425 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3426 && alias_sets_conflict_p (data->lhs_alias_set,
3427 get_alias_set
3428 (TREE_TYPE (TREE_VALUE (type)))))
3429 return t;
3432 if (IS_TYPE_OR_DECL_P (t))
3433 *walk_subtrees = 0;
3434 return NULL;
3437 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3438 force values that overlap with the lhs (as described by *DATA)
3439 into temporaries. */
3441 static void
3442 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3443 struct gimplify_init_ctor_preeval_data *data)
3445 enum gimplify_status one;
3447 /* If the value is constant, then there's nothing to pre-evaluate. */
3448 if (TREE_CONSTANT (*expr_p))
3450 /* Ensure it does not have side effects, it might contain a reference to
3451 the object we're initializing. */
3452 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3453 return;
3456 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3457 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3458 return;
3460 /* Recurse for nested constructors. */
3461 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3463 unsigned HOST_WIDE_INT ix;
3464 constructor_elt *ce;
3465 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
3467 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
3468 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3470 return;
3473 /* If this is a variable sized type, we must remember the size. */
3474 maybe_with_size_expr (expr_p);
3476 /* Gimplify the constructor element to something appropriate for the rhs
3477 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3478 the gimplifier will consider this a store to memory. Doing this
3479 gimplification now means that we won't have to deal with complicated
3480 language-specific trees, nor trees like SAVE_EXPR that can induce
3481 exponential search behavior. */
3482 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3483 if (one == GS_ERROR)
3485 *expr_p = NULL;
3486 return;
3489 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3490 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3491 always be true for all scalars, since is_gimple_mem_rhs insists on a
3492 temporary variable for them. */
3493 if (DECL_P (*expr_p))
3494 return;
3496 /* If this is of variable size, we have no choice but to assume it doesn't
3497 overlap since we can't make a temporary for it. */
3498 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3499 return;
3501 /* Otherwise, we must search for overlap ... */
3502 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3503 return;
3505 /* ... and if found, force the value into a temporary. */
3506 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3509 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3510 a RANGE_EXPR in a CONSTRUCTOR for an array.
3512 var = lower;
3513 loop_entry:
3514 object[var] = value;
3515 if (var == upper)
3516 goto loop_exit;
3517 var = var + 1;
3518 goto loop_entry;
3519 loop_exit:
3521 We increment var _after_ the loop exit check because we might otherwise
3522 fail if upper == TYPE_MAX_VALUE (type for upper).
3524 Note that we never have to deal with SAVE_EXPRs here, because this has
3525 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3527 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
3528 gimple_seq *, bool);
3530 static void
3531 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3532 tree value, tree array_elt_type,
3533 gimple_seq *pre_p, bool cleared)
3535 tree loop_entry_label, loop_exit_label, fall_thru_label;
3536 tree var, var_type, cref, tmp;
3538 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3539 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3540 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3542 /* Create and initialize the index variable. */
3543 var_type = TREE_TYPE (upper);
3544 var = create_tmp_var (var_type);
3545 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3547 /* Add the loop entry label. */
3548 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3550 /* Build the reference. */
3551 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3552 var, NULL_TREE, NULL_TREE);
3554 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3555 the store. Otherwise just assign value to the reference. */
3557 if (TREE_CODE (value) == CONSTRUCTOR)
3558 /* NB we might have to call ourself recursively through
3559 gimplify_init_ctor_eval if the value is a constructor. */
3560 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3561 pre_p, cleared);
3562 else
3563 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3565 /* We exit the loop when the index var is equal to the upper bound. */
3566 gimplify_seq_add_stmt (pre_p,
3567 gimple_build_cond (EQ_EXPR, var, upper,
3568 loop_exit_label, fall_thru_label));
3570 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3572 /* Otherwise, increment the index var... */
3573 tmp = build2 (PLUS_EXPR, var_type, var,
3574 fold_convert (var_type, integer_one_node));
3575 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3577 /* ...and jump back to the loop entry. */
3578 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3580 /* Add the loop exit label. */
3581 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3584 /* Return true if FDECL is accessing a field that is zero sized. */
3586 static bool
3587 zero_sized_field_decl (const_tree fdecl)
3589 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3590 && integer_zerop (DECL_SIZE (fdecl)))
3591 return true;
3592 return false;
3595 /* Return true if TYPE is zero sized. */
3597 static bool
3598 zero_sized_type (const_tree type)
3600 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3601 && integer_zerop (TYPE_SIZE (type)))
3602 return true;
3603 return false;
3606 /* A subroutine of gimplify_init_constructor. Generate individual
3607 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3608 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3609 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3610 zeroed first. */
3612 static void
3613 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
3614 gimple_seq *pre_p, bool cleared)
3616 tree array_elt_type = NULL;
3617 unsigned HOST_WIDE_INT ix;
3618 tree purpose, value;
3620 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3621 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3623 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3625 tree cref;
3627 /* NULL values are created above for gimplification errors. */
3628 if (value == NULL)
3629 continue;
3631 if (cleared && initializer_zerop (value))
3632 continue;
3634 /* ??? Here's to hoping the front end fills in all of the indices,
3635 so we don't have to figure out what's missing ourselves. */
3636 gcc_assert (purpose);
3638 /* Skip zero-sized fields, unless value has side-effects. This can
3639 happen with calls to functions returning a zero-sized type, which
3640 we shouldn't discard. As a number of downstream passes don't
3641 expect sets of zero-sized fields, we rely on the gimplification of
3642 the MODIFY_EXPR we make below to drop the assignment statement. */
3643 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3644 continue;
3646 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3647 whole range. */
3648 if (TREE_CODE (purpose) == RANGE_EXPR)
3650 tree lower = TREE_OPERAND (purpose, 0);
3651 tree upper = TREE_OPERAND (purpose, 1);
3653 /* If the lower bound is equal to upper, just treat it as if
3654 upper was the index. */
3655 if (simple_cst_equal (lower, upper))
3656 purpose = upper;
3657 else
3659 gimplify_init_ctor_eval_range (object, lower, upper, value,
3660 array_elt_type, pre_p, cleared);
3661 continue;
3665 if (array_elt_type)
3667 /* Do not use bitsizetype for ARRAY_REF indices. */
3668 if (TYPE_DOMAIN (TREE_TYPE (object)))
3669 purpose
3670 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3671 purpose);
3672 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3673 purpose, NULL_TREE, NULL_TREE);
3675 else
3677 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3678 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3679 unshare_expr (object), purpose, NULL_TREE);
3682 if (TREE_CODE (value) == CONSTRUCTOR
3683 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3684 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3685 pre_p, cleared);
3686 else
3688 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3689 gimplify_and_add (init, pre_p);
3690 ggc_free (init);
3695 /* Return the appropriate RHS predicate for this LHS. */
3697 gimple_predicate
3698 rhs_predicate_for (tree lhs)
3700 if (is_gimple_reg (lhs))
3701 return is_gimple_reg_rhs_or_call;
3702 else
3703 return is_gimple_mem_rhs_or_call;
3706 /* Gimplify a C99 compound literal expression. This just means adding
3707 the DECL_EXPR before the current statement and using its anonymous
3708 decl instead. */
3710 static enum gimplify_status
3711 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
3712 bool (*gimple_test_f) (tree),
3713 fallback_t fallback)
3715 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3716 tree decl = DECL_EXPR_DECL (decl_s);
3717 tree init = DECL_INITIAL (decl);
3718 /* Mark the decl as addressable if the compound literal
3719 expression is addressable now, otherwise it is marked too late
3720 after we gimplify the initialization expression. */
3721 if (TREE_ADDRESSABLE (*expr_p))
3722 TREE_ADDRESSABLE (decl) = 1;
3723 /* Otherwise, if we don't need an lvalue and have a literal directly
3724 substitute it. Check if it matches the gimple predicate, as
3725 otherwise we'd generate a new temporary, and we can as well just
3726 use the decl we already have. */
3727 else if (!TREE_ADDRESSABLE (decl)
3728 && init
3729 && (fallback & fb_lvalue) == 0
3730 && gimple_test_f (init))
3732 *expr_p = init;
3733 return GS_OK;
3736 /* Preliminarily mark non-addressed complex variables as eligible
3737 for promotion to gimple registers. We'll transform their uses
3738 as we find them. */
3739 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3740 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3741 && !TREE_THIS_VOLATILE (decl)
3742 && !needs_to_live_in_memory (decl))
3743 DECL_GIMPLE_REG_P (decl) = 1;
3745 /* If the decl is not addressable, then it is being used in some
3746 expression or on the right hand side of a statement, and it can
3747 be put into a readonly data section. */
3748 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3749 TREE_READONLY (decl) = 1;
3751 /* This decl isn't mentioned in the enclosing block, so add it to the
3752 list of temps. FIXME it seems a bit of a kludge to say that
3753 anonymous artificial vars aren't pushed, but everything else is. */
3754 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3755 gimple_add_tmp_var (decl);
3757 gimplify_and_add (decl_s, pre_p);
3758 *expr_p = decl;
3759 return GS_OK;
3762 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3763 return a new CONSTRUCTOR if something changed. */
3765 static tree
3766 optimize_compound_literals_in_ctor (tree orig_ctor)
3768 tree ctor = orig_ctor;
3769 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3770 unsigned int idx, num = vec_safe_length (elts);
3772 for (idx = 0; idx < num; idx++)
3774 tree value = (*elts)[idx].value;
3775 tree newval = value;
3776 if (TREE_CODE (value) == CONSTRUCTOR)
3777 newval = optimize_compound_literals_in_ctor (value);
3778 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3780 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3781 tree decl = DECL_EXPR_DECL (decl_s);
3782 tree init = DECL_INITIAL (decl);
3784 if (!TREE_ADDRESSABLE (value)
3785 && !TREE_ADDRESSABLE (decl)
3786 && init
3787 && TREE_CODE (init) == CONSTRUCTOR)
3788 newval = optimize_compound_literals_in_ctor (init);
3790 if (newval == value)
3791 continue;
3793 if (ctor == orig_ctor)
3795 ctor = copy_node (orig_ctor);
3796 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3797 elts = CONSTRUCTOR_ELTS (ctor);
3799 (*elts)[idx].value = newval;
3801 return ctor;
3804 /* A subroutine of gimplify_modify_expr. Break out elements of a
3805 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3807 Note that we still need to clear any elements that don't have explicit
3808 initializers, so if not all elements are initialized we keep the
3809 original MODIFY_EXPR, we just remove all of the constructor elements.
3811 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3812 GS_ERROR if we would have to create a temporary when gimplifying
3813 this constructor. Otherwise, return GS_OK.
3815 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3817 static enum gimplify_status
3818 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3819 bool want_value, bool notify_temp_creation)
3821 tree object, ctor, type;
3822 enum gimplify_status ret;
3823 vec<constructor_elt, va_gc> *elts;
3825 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3827 if (!notify_temp_creation)
3829 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3830 is_gimple_lvalue, fb_lvalue);
3831 if (ret == GS_ERROR)
3832 return ret;
3835 object = TREE_OPERAND (*expr_p, 0);
3836 ctor = TREE_OPERAND (*expr_p, 1) =
3837 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3838 type = TREE_TYPE (ctor);
3839 elts = CONSTRUCTOR_ELTS (ctor);
3840 ret = GS_ALL_DONE;
3842 switch (TREE_CODE (type))
3844 case RECORD_TYPE:
3845 case UNION_TYPE:
3846 case QUAL_UNION_TYPE:
3847 case ARRAY_TYPE:
3849 struct gimplify_init_ctor_preeval_data preeval_data;
3850 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3851 bool cleared, complete_p, valid_const_initializer;
3853 /* Aggregate types must lower constructors to initialization of
3854 individual elements. The exception is that a CONSTRUCTOR node
3855 with no elements indicates zero-initialization of the whole. */
3856 if (vec_safe_is_empty (elts))
3858 if (notify_temp_creation)
3859 return GS_OK;
3860 break;
3863 /* Fetch information about the constructor to direct later processing.
3864 We might want to make static versions of it in various cases, and
3865 can only do so if it known to be a valid constant initializer. */
3866 valid_const_initializer
3867 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3868 &num_ctor_elements, &complete_p);
3870 /* If a const aggregate variable is being initialized, then it
3871 should never be a lose to promote the variable to be static. */
3872 if (valid_const_initializer
3873 && num_nonzero_elements > 1
3874 && TREE_READONLY (object)
3875 && TREE_CODE (object) == VAR_DECL
3876 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3878 if (notify_temp_creation)
3879 return GS_ERROR;
3880 DECL_INITIAL (object) = ctor;
3881 TREE_STATIC (object) = 1;
3882 if (!DECL_NAME (object))
3883 DECL_NAME (object) = create_tmp_var_name ("C");
3884 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3886 /* ??? C++ doesn't automatically append a .<number> to the
3887 assembler name, and even when it does, it looks at FE private
3888 data structures to figure out what that number should be,
3889 which are not set for this variable. I suppose this is
3890 important for local statics for inline functions, which aren't
3891 "local" in the object file sense. So in order to get a unique
3892 TU-local symbol, we must invoke the lhd version now. */
3893 lhd_set_decl_assembler_name (object);
3895 *expr_p = NULL_TREE;
3896 break;
3899 /* If there are "lots" of initialized elements, even discounting
3900 those that are not address constants (and thus *must* be
3901 computed at runtime), then partition the constructor into
3902 constant and non-constant parts. Block copy the constant
3903 parts in, then generate code for the non-constant parts. */
3904 /* TODO. There's code in cp/typeck.c to do this. */
3906 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3907 /* store_constructor will ignore the clearing of variable-sized
3908 objects. Initializers for such objects must explicitly set
3909 every field that needs to be set. */
3910 cleared = false;
3911 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
3912 /* If the constructor isn't complete, clear the whole object
3913 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
3915 ??? This ought not to be needed. For any element not present
3916 in the initializer, we should simply set them to zero. Except
3917 we'd need to *find* the elements that are not present, and that
3918 requires trickery to avoid quadratic compile-time behavior in
3919 large cases or excessive memory use in small cases. */
3920 cleared = true;
3921 else if (num_ctor_elements - num_nonzero_elements
3922 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3923 && num_nonzero_elements < num_ctor_elements / 4)
3924 /* If there are "lots" of zeros, it's more efficient to clear
3925 the memory and then set the nonzero elements. */
3926 cleared = true;
3927 else
3928 cleared = false;
3930 /* If there are "lots" of initialized elements, and all of them
3931 are valid address constants, then the entire initializer can
3932 be dropped to memory, and then memcpy'd out. Don't do this
3933 for sparse arrays, though, as it's more efficient to follow
3934 the standard CONSTRUCTOR behavior of memset followed by
3935 individual element initialization. Also don't do this for small
3936 all-zero initializers (which aren't big enough to merit
3937 clearing), and don't try to make bitwise copies of
3938 TREE_ADDRESSABLE types.
3940 We cannot apply such transformation when compiling chkp static
3941 initializer because creation of initializer image in the memory
3942 will require static initialization of bounds for it. It should
3943 result in another gimplification of similar initializer and we
3944 may fall into infinite loop. */
3945 if (valid_const_initializer
3946 && !(cleared || num_nonzero_elements == 0)
3947 && !TREE_ADDRESSABLE (type)
3948 && (!current_function_decl
3949 || !lookup_attribute ("chkp ctor",
3950 DECL_ATTRIBUTES (current_function_decl))))
3952 HOST_WIDE_INT size = int_size_in_bytes (type);
3953 unsigned int align;
3955 /* ??? We can still get unbounded array types, at least
3956 from the C++ front end. This seems wrong, but attempt
3957 to work around it for now. */
3958 if (size < 0)
3960 size = int_size_in_bytes (TREE_TYPE (object));
3961 if (size >= 0)
3962 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3965 /* Find the maximum alignment we can assume for the object. */
3966 /* ??? Make use of DECL_OFFSET_ALIGN. */
3967 if (DECL_P (object))
3968 align = DECL_ALIGN (object);
3969 else
3970 align = TYPE_ALIGN (type);
3972 /* Do a block move either if the size is so small as to make
3973 each individual move a sub-unit move on average, or if it
3974 is so large as to make individual moves inefficient. */
3975 if (size > 0
3976 && num_nonzero_elements > 1
3977 && (size < num_nonzero_elements
3978 || !can_move_by_pieces (size, align)))
3980 if (notify_temp_creation)
3981 return GS_ERROR;
3983 walk_tree (&ctor, force_labels_r, NULL, NULL);
3984 ctor = tree_output_constant_def (ctor);
3985 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
3986 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
3987 TREE_OPERAND (*expr_p, 1) = ctor;
3989 /* This is no longer an assignment of a CONSTRUCTOR, but
3990 we still may have processing to do on the LHS. So
3991 pretend we didn't do anything here to let that happen. */
3992 return GS_UNHANDLED;
3996 /* If the target is volatile, we have non-zero elements and more than
3997 one field to assign, initialize the target from a temporary. */
3998 if (TREE_THIS_VOLATILE (object)
3999 && !TREE_ADDRESSABLE (type)
4000 && num_nonzero_elements > 0
4001 && vec_safe_length (elts) > 1)
4003 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4004 TREE_OPERAND (*expr_p, 0) = temp;
4005 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4006 *expr_p,
4007 build2 (MODIFY_EXPR, void_type_node,
4008 object, temp));
4009 return GS_OK;
4012 if (notify_temp_creation)
4013 return GS_OK;
4015 /* If there are nonzero elements and if needed, pre-evaluate to capture
4016 elements overlapping with the lhs into temporaries. We must do this
4017 before clearing to fetch the values before they are zeroed-out. */
4018 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4020 preeval_data.lhs_base_decl = get_base_address (object);
4021 if (!DECL_P (preeval_data.lhs_base_decl))
4022 preeval_data.lhs_base_decl = NULL;
4023 preeval_data.lhs_alias_set = get_alias_set (object);
4025 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4026 pre_p, post_p, &preeval_data);
4029 bool ctor_has_side_effects_p
4030 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4032 if (cleared)
4034 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4035 Note that we still have to gimplify, in order to handle the
4036 case of variable sized types. Avoid shared tree structures. */
4037 CONSTRUCTOR_ELTS (ctor) = NULL;
4038 TREE_SIDE_EFFECTS (ctor) = 0;
4039 object = unshare_expr (object);
4040 gimplify_stmt (expr_p, pre_p);
4043 /* If we have not block cleared the object, or if there are nonzero
4044 elements in the constructor, or if the constructor has side effects,
4045 add assignments to the individual scalar fields of the object. */
4046 if (!cleared
4047 || num_nonzero_elements > 0
4048 || ctor_has_side_effects_p)
4049 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4051 *expr_p = NULL_TREE;
4053 break;
4055 case COMPLEX_TYPE:
4057 tree r, i;
4059 if (notify_temp_creation)
4060 return GS_OK;
4062 /* Extract the real and imaginary parts out of the ctor. */
4063 gcc_assert (elts->length () == 2);
4064 r = (*elts)[0].value;
4065 i = (*elts)[1].value;
4066 if (r == NULL || i == NULL)
4068 tree zero = build_zero_cst (TREE_TYPE (type));
4069 if (r == NULL)
4070 r = zero;
4071 if (i == NULL)
4072 i = zero;
4075 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4076 represent creation of a complex value. */
4077 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4079 ctor = build_complex (type, r, i);
4080 TREE_OPERAND (*expr_p, 1) = ctor;
4082 else
4084 ctor = build2 (COMPLEX_EXPR, type, r, i);
4085 TREE_OPERAND (*expr_p, 1) = ctor;
4086 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4087 pre_p,
4088 post_p,
4089 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4090 fb_rvalue);
4093 break;
4095 case VECTOR_TYPE:
4097 unsigned HOST_WIDE_INT ix;
4098 constructor_elt *ce;
4100 if (notify_temp_creation)
4101 return GS_OK;
4103 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4104 if (TREE_CONSTANT (ctor))
4106 bool constant_p = true;
4107 tree value;
4109 /* Even when ctor is constant, it might contain non-*_CST
4110 elements, such as addresses or trapping values like
4111 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4112 in VECTOR_CST nodes. */
4113 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4114 if (!CONSTANT_CLASS_P (value))
4116 constant_p = false;
4117 break;
4120 if (constant_p)
4122 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4123 break;
4126 TREE_CONSTANT (ctor) = 0;
4129 /* Vector types use CONSTRUCTOR all the way through gimple
4130 compilation as a general initializer. */
4131 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4133 enum gimplify_status tret;
4134 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4135 fb_rvalue);
4136 if (tret == GS_ERROR)
4137 ret = GS_ERROR;
4139 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4140 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4142 break;
4144 default:
4145 /* So how did we get a CONSTRUCTOR for a scalar type? */
4146 gcc_unreachable ();
4149 if (ret == GS_ERROR)
4150 return GS_ERROR;
4151 else if (want_value)
4153 *expr_p = object;
4154 return GS_OK;
4156 else
4158 /* If we have gimplified both sides of the initializer but have
4159 not emitted an assignment, do so now. */
4160 if (*expr_p)
4162 tree lhs = TREE_OPERAND (*expr_p, 0);
4163 tree rhs = TREE_OPERAND (*expr_p, 1);
4164 gassign *init = gimple_build_assign (lhs, rhs);
4165 gimplify_seq_add_stmt (pre_p, init);
4166 *expr_p = NULL;
4169 return GS_ALL_DONE;
4173 /* Given a pointer value OP0, return a simplified version of an
4174 indirection through OP0, or NULL_TREE if no simplification is
4175 possible. This may only be applied to a rhs of an expression.
4176 Note that the resulting type may be different from the type pointed
4177 to in the sense that it is still compatible from the langhooks
4178 point of view. */
4180 static tree
4181 gimple_fold_indirect_ref_rhs (tree t)
4183 return gimple_fold_indirect_ref (t);
4186 /* Subroutine of gimplify_modify_expr to do simplifications of
4187 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4188 something changes. */
4190 static enum gimplify_status
4191 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4192 gimple_seq *pre_p, gimple_seq *post_p,
4193 bool want_value)
4195 enum gimplify_status ret = GS_UNHANDLED;
4196 bool changed;
4200 changed = false;
4201 switch (TREE_CODE (*from_p))
4203 case VAR_DECL:
4204 /* If we're assigning from a read-only variable initialized with
4205 a constructor, do the direct assignment from the constructor,
4206 but only if neither source nor target are volatile since this
4207 latter assignment might end up being done on a per-field basis. */
4208 if (DECL_INITIAL (*from_p)
4209 && TREE_READONLY (*from_p)
4210 && !TREE_THIS_VOLATILE (*from_p)
4211 && !TREE_THIS_VOLATILE (*to_p)
4212 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4214 tree old_from = *from_p;
4215 enum gimplify_status subret;
4217 /* Move the constructor into the RHS. */
4218 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4220 /* Let's see if gimplify_init_constructor will need to put
4221 it in memory. */
4222 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4223 false, true);
4224 if (subret == GS_ERROR)
4226 /* If so, revert the change. */
4227 *from_p = old_from;
4229 else
4231 ret = GS_OK;
4232 changed = true;
4235 break;
4236 case INDIRECT_REF:
4238 /* If we have code like
4240 *(const A*)(A*)&x
4242 where the type of "x" is a (possibly cv-qualified variant
4243 of "A"), treat the entire expression as identical to "x".
4244 This kind of code arises in C++ when an object is bound
4245 to a const reference, and if "x" is a TARGET_EXPR we want
4246 to take advantage of the optimization below. */
4247 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4248 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4249 if (t)
4251 if (TREE_THIS_VOLATILE (t) != volatile_p)
4253 if (DECL_P (t))
4254 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4255 build_fold_addr_expr (t));
4256 if (REFERENCE_CLASS_P (t))
4257 TREE_THIS_VOLATILE (t) = volatile_p;
4259 *from_p = t;
4260 ret = GS_OK;
4261 changed = true;
4263 break;
4266 case TARGET_EXPR:
4268 /* If we are initializing something from a TARGET_EXPR, strip the
4269 TARGET_EXPR and initialize it directly, if possible. This can't
4270 be done if the initializer is void, since that implies that the
4271 temporary is set in some non-trivial way.
4273 ??? What about code that pulls out the temp and uses it
4274 elsewhere? I think that such code never uses the TARGET_EXPR as
4275 an initializer. If I'm wrong, we'll die because the temp won't
4276 have any RTL. In that case, I guess we'll need to replace
4277 references somehow. */
4278 tree init = TARGET_EXPR_INITIAL (*from_p);
4280 if (init
4281 && !VOID_TYPE_P (TREE_TYPE (init)))
4283 *from_p = init;
4284 ret = GS_OK;
4285 changed = true;
4288 break;
4290 case COMPOUND_EXPR:
4291 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4292 caught. */
4293 gimplify_compound_expr (from_p, pre_p, true);
4294 ret = GS_OK;
4295 changed = true;
4296 break;
4298 case CONSTRUCTOR:
4299 /* If we already made some changes, let the front end have a
4300 crack at this before we break it down. */
4301 if (ret != GS_UNHANDLED)
4302 break;
4303 /* If we're initializing from a CONSTRUCTOR, break this into
4304 individual MODIFY_EXPRs. */
4305 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4306 false);
4308 case COND_EXPR:
4309 /* If we're assigning to a non-register type, push the assignment
4310 down into the branches. This is mandatory for ADDRESSABLE types,
4311 since we cannot generate temporaries for such, but it saves a
4312 copy in other cases as well. */
4313 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4315 /* This code should mirror the code in gimplify_cond_expr. */
4316 enum tree_code code = TREE_CODE (*expr_p);
4317 tree cond = *from_p;
4318 tree result = *to_p;
4320 ret = gimplify_expr (&result, pre_p, post_p,
4321 is_gimple_lvalue, fb_lvalue);
4322 if (ret != GS_ERROR)
4323 ret = GS_OK;
4325 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4326 TREE_OPERAND (cond, 1)
4327 = build2 (code, void_type_node, result,
4328 TREE_OPERAND (cond, 1));
4329 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4330 TREE_OPERAND (cond, 2)
4331 = build2 (code, void_type_node, unshare_expr (result),
4332 TREE_OPERAND (cond, 2));
4334 TREE_TYPE (cond) = void_type_node;
4335 recalculate_side_effects (cond);
4337 if (want_value)
4339 gimplify_and_add (cond, pre_p);
4340 *expr_p = unshare_expr (result);
4342 else
4343 *expr_p = cond;
4344 return ret;
4346 break;
4348 case CALL_EXPR:
4349 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4350 return slot so that we don't generate a temporary. */
4351 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4352 && aggregate_value_p (*from_p, *from_p))
4354 bool use_target;
4356 if (!(rhs_predicate_for (*to_p))(*from_p))
4357 /* If we need a temporary, *to_p isn't accurate. */
4358 use_target = false;
4359 /* It's OK to use the return slot directly unless it's an NRV. */
4360 else if (TREE_CODE (*to_p) == RESULT_DECL
4361 && DECL_NAME (*to_p) == NULL_TREE
4362 && needs_to_live_in_memory (*to_p))
4363 use_target = true;
4364 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4365 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4366 /* Don't force regs into memory. */
4367 use_target = false;
4368 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4369 /* It's OK to use the target directly if it's being
4370 initialized. */
4371 use_target = true;
4372 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
4373 != INTEGER_CST)
4374 /* Always use the target and thus RSO for variable-sized types.
4375 GIMPLE cannot deal with a variable-sized assignment
4376 embedded in a call statement. */
4377 use_target = true;
4378 else if (TREE_CODE (*to_p) != SSA_NAME
4379 && (!is_gimple_variable (*to_p)
4380 || needs_to_live_in_memory (*to_p)))
4381 /* Don't use the original target if it's already addressable;
4382 if its address escapes, and the called function uses the
4383 NRV optimization, a conforming program could see *to_p
4384 change before the called function returns; see c++/19317.
4385 When optimizing, the return_slot pass marks more functions
4386 as safe after we have escape info. */
4387 use_target = false;
4388 else
4389 use_target = true;
4391 if (use_target)
4393 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4394 mark_addressable (*to_p);
4397 break;
4399 case WITH_SIZE_EXPR:
4400 /* Likewise for calls that return an aggregate of non-constant size,
4401 since we would not be able to generate a temporary at all. */
4402 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4404 *from_p = TREE_OPERAND (*from_p, 0);
4405 /* We don't change ret in this case because the
4406 WITH_SIZE_EXPR might have been added in
4407 gimplify_modify_expr, so returning GS_OK would lead to an
4408 infinite loop. */
4409 changed = true;
4411 break;
4413 /* If we're initializing from a container, push the initialization
4414 inside it. */
4415 case CLEANUP_POINT_EXPR:
4416 case BIND_EXPR:
4417 case STATEMENT_LIST:
4419 tree wrap = *from_p;
4420 tree t;
4422 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4423 fb_lvalue);
4424 if (ret != GS_ERROR)
4425 ret = GS_OK;
4427 t = voidify_wrapper_expr (wrap, *expr_p);
4428 gcc_assert (t == *expr_p);
4430 if (want_value)
4432 gimplify_and_add (wrap, pre_p);
4433 *expr_p = unshare_expr (*to_p);
4435 else
4436 *expr_p = wrap;
4437 return GS_OK;
4440 case COMPOUND_LITERAL_EXPR:
4442 tree complit = TREE_OPERAND (*expr_p, 1);
4443 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4444 tree decl = DECL_EXPR_DECL (decl_s);
4445 tree init = DECL_INITIAL (decl);
4447 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4448 into struct T x = { 0, 1, 2 } if the address of the
4449 compound literal has never been taken. */
4450 if (!TREE_ADDRESSABLE (complit)
4451 && !TREE_ADDRESSABLE (decl)
4452 && init)
4454 *expr_p = copy_node (*expr_p);
4455 TREE_OPERAND (*expr_p, 1) = init;
4456 return GS_OK;
4460 default:
4461 break;
4464 while (changed);
4466 return ret;
4470 /* Return true if T looks like a valid GIMPLE statement. */
4472 static bool
4473 is_gimple_stmt (tree t)
4475 const enum tree_code code = TREE_CODE (t);
4477 switch (code)
4479 case NOP_EXPR:
4480 /* The only valid NOP_EXPR is the empty statement. */
4481 return IS_EMPTY_STMT (t);
4483 case BIND_EXPR:
4484 case COND_EXPR:
4485 /* These are only valid if they're void. */
4486 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4488 case SWITCH_EXPR:
4489 case GOTO_EXPR:
4490 case RETURN_EXPR:
4491 case LABEL_EXPR:
4492 case CASE_LABEL_EXPR:
4493 case TRY_CATCH_EXPR:
4494 case TRY_FINALLY_EXPR:
4495 case EH_FILTER_EXPR:
4496 case CATCH_EXPR:
4497 case ASM_EXPR:
4498 case STATEMENT_LIST:
4499 case OACC_PARALLEL:
4500 case OACC_KERNELS:
4501 case OACC_DATA:
4502 case OACC_HOST_DATA:
4503 case OACC_DECLARE:
4504 case OACC_UPDATE:
4505 case OACC_ENTER_DATA:
4506 case OACC_EXIT_DATA:
4507 case OACC_CACHE:
4508 case OMP_PARALLEL:
4509 case OMP_FOR:
4510 case OMP_SIMD:
4511 case CILK_SIMD:
4512 case OMP_DISTRIBUTE:
4513 case OACC_LOOP:
4514 case OMP_SECTIONS:
4515 case OMP_SECTION:
4516 case OMP_SINGLE:
4517 case OMP_MASTER:
4518 case OMP_TASKGROUP:
4519 case OMP_ORDERED:
4520 case OMP_CRITICAL:
4521 case OMP_TASK:
4522 case OMP_TARGET:
4523 case OMP_TARGET_DATA:
4524 case OMP_TARGET_UPDATE:
4525 case OMP_TARGET_ENTER_DATA:
4526 case OMP_TARGET_EXIT_DATA:
4527 case OMP_TASKLOOP:
4528 case OMP_TEAMS:
4529 /* These are always void. */
4530 return true;
4532 case CALL_EXPR:
4533 case MODIFY_EXPR:
4534 case PREDICT_EXPR:
4535 /* These are valid regardless of their type. */
4536 return true;
4538 default:
4539 return false;
4544 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4545 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4546 DECL_GIMPLE_REG_P set.
4548 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4549 other, unmodified part of the complex object just before the total store.
4550 As a consequence, if the object is still uninitialized, an undefined value
4551 will be loaded into a register, which may result in a spurious exception
4552 if the register is floating-point and the value happens to be a signaling
4553 NaN for example. Then the fully-fledged complex operations lowering pass
4554 followed by a DCE pass are necessary in order to fix things up. */
4556 static enum gimplify_status
4557 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4558 bool want_value)
4560 enum tree_code code, ocode;
4561 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4563 lhs = TREE_OPERAND (*expr_p, 0);
4564 rhs = TREE_OPERAND (*expr_p, 1);
4565 code = TREE_CODE (lhs);
4566 lhs = TREE_OPERAND (lhs, 0);
4568 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4569 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4570 TREE_NO_WARNING (other) = 1;
4571 other = get_formal_tmp_var (other, pre_p);
4573 realpart = code == REALPART_EXPR ? rhs : other;
4574 imagpart = code == REALPART_EXPR ? other : rhs;
4576 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4577 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4578 else
4579 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4581 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4582 *expr_p = (want_value) ? rhs : NULL_TREE;
4584 return GS_ALL_DONE;
4587 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4589 modify_expr
4590 : varname '=' rhs
4591 | '*' ID '=' rhs
4593 PRE_P points to the list where side effects that must happen before
4594 *EXPR_P should be stored.
4596 POST_P points to the list where side effects that must happen after
4597 *EXPR_P should be stored.
4599 WANT_VALUE is nonzero iff we want to use the value of this expression
4600 in another expression. */
4602 static enum gimplify_status
4603 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4604 bool want_value)
4606 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4607 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4608 enum gimplify_status ret = GS_UNHANDLED;
4609 gimple *assign;
4610 location_t loc = EXPR_LOCATION (*expr_p);
4611 gimple_stmt_iterator gsi;
4613 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4614 || TREE_CODE (*expr_p) == INIT_EXPR);
4616 /* Trying to simplify a clobber using normal logic doesn't work,
4617 so handle it here. */
4618 if (TREE_CLOBBER_P (*from_p))
4620 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4621 if (ret == GS_ERROR)
4622 return ret;
4623 gcc_assert (!want_value
4624 && (TREE_CODE (*to_p) == VAR_DECL
4625 || TREE_CODE (*to_p) == MEM_REF));
4626 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4627 *expr_p = NULL;
4628 return GS_ALL_DONE;
4631 /* Insert pointer conversions required by the middle-end that are not
4632 required by the frontend. This fixes middle-end type checking for
4633 for example gcc.dg/redecl-6.c. */
4634 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4636 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4637 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4638 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4641 /* See if any simplifications can be done based on what the RHS is. */
4642 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4643 want_value);
4644 if (ret != GS_UNHANDLED)
4645 return ret;
4647 /* For zero sized types only gimplify the left hand side and right hand
4648 side as statements and throw away the assignment. Do this after
4649 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4650 types properly. */
4651 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4653 gimplify_stmt (from_p, pre_p);
4654 gimplify_stmt (to_p, pre_p);
4655 *expr_p = NULL_TREE;
4656 return GS_ALL_DONE;
4659 /* If the value being copied is of variable width, compute the length
4660 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4661 before gimplifying any of the operands so that we can resolve any
4662 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4663 the size of the expression to be copied, not of the destination, so
4664 that is what we must do here. */
4665 maybe_with_size_expr (from_p);
4667 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4668 if (ret == GS_ERROR)
4669 return ret;
4671 /* As a special case, we have to temporarily allow for assignments
4672 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4673 a toplevel statement, when gimplifying the GENERIC expression
4674 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4675 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4677 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4678 prevent gimplify_expr from trying to create a new temporary for
4679 foo's LHS, we tell it that it should only gimplify until it
4680 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4681 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4682 and all we need to do here is set 'a' to be its LHS. */
4683 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4684 fb_rvalue);
4685 if (ret == GS_ERROR)
4686 return ret;
4688 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
4689 size as argument to the call. */
4690 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4692 tree call = TREE_OPERAND (*from_p, 0);
4693 tree vlasize = TREE_OPERAND (*from_p, 1);
4695 if (TREE_CODE (call) == CALL_EXPR
4696 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
4698 int nargs = call_expr_nargs (call);
4699 tree type = TREE_TYPE (call);
4700 tree ap = CALL_EXPR_ARG (call, 0);
4701 tree tag = CALL_EXPR_ARG (call, 1);
4702 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
4703 IFN_VA_ARG, type,
4704 nargs + 1, ap, tag,
4705 vlasize);
4706 tree *call_p = &(TREE_OPERAND (*from_p, 0));
4707 *call_p = newcall;
4711 /* Now see if the above changed *from_p to something we handle specially. */
4712 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4713 want_value);
4714 if (ret != GS_UNHANDLED)
4715 return ret;
4717 /* If we've got a variable sized assignment between two lvalues (i.e. does
4718 not involve a call), then we can make things a bit more straightforward
4719 by converting the assignment to memcpy or memset. */
4720 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4722 tree from = TREE_OPERAND (*from_p, 0);
4723 tree size = TREE_OPERAND (*from_p, 1);
4725 if (TREE_CODE (from) == CONSTRUCTOR)
4726 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4728 if (is_gimple_addressable (from))
4730 *from_p = from;
4731 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4732 pre_p);
4736 /* Transform partial stores to non-addressable complex variables into
4737 total stores. This allows us to use real instead of virtual operands
4738 for these variables, which improves optimization. */
4739 if ((TREE_CODE (*to_p) == REALPART_EXPR
4740 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4741 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4742 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4744 /* Try to alleviate the effects of the gimplification creating artificial
4745 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
4746 make sure not to create DECL_DEBUG_EXPR links across functions. */
4747 if (!gimplify_ctxp->into_ssa
4748 && TREE_CODE (*from_p) == VAR_DECL
4749 && DECL_IGNORED_P (*from_p)
4750 && DECL_P (*to_p)
4751 && !DECL_IGNORED_P (*to_p)
4752 && decl_function_context (*to_p) == current_function_decl)
4754 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4755 DECL_NAME (*from_p)
4756 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4757 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
4758 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4761 if (want_value && TREE_THIS_VOLATILE (*to_p))
4762 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4764 if (TREE_CODE (*from_p) == CALL_EXPR)
4766 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4767 instead of a GIMPLE_ASSIGN. */
4768 gcall *call_stmt;
4769 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
4771 /* Gimplify internal functions created in the FEs. */
4772 int nargs = call_expr_nargs (*from_p), i;
4773 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
4774 auto_vec<tree> vargs (nargs);
4776 for (i = 0; i < nargs; i++)
4778 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
4779 EXPR_LOCATION (*from_p));
4780 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
4782 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
4783 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
4785 else
4787 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4788 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4789 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4790 tree fndecl = get_callee_fndecl (*from_p);
4791 if (fndecl
4792 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4793 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
4794 && call_expr_nargs (*from_p) == 3)
4795 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
4796 CALL_EXPR_ARG (*from_p, 0),
4797 CALL_EXPR_ARG (*from_p, 1),
4798 CALL_EXPR_ARG (*from_p, 2));
4799 else
4801 call_stmt = gimple_build_call_from_tree (*from_p);
4802 gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
4805 notice_special_calls (call_stmt);
4806 if (!gimple_call_noreturn_p (call_stmt))
4807 gimple_call_set_lhs (call_stmt, *to_p);
4808 assign = call_stmt;
4810 else
4812 assign = gimple_build_assign (*to_p, *from_p);
4813 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4816 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4818 /* We should have got an SSA name from the start. */
4819 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
4822 gimplify_seq_add_stmt (pre_p, assign);
4823 gsi = gsi_last (*pre_p);
4824 maybe_fold_stmt (&gsi);
4826 if (want_value)
4828 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4829 return GS_OK;
4831 else
4832 *expr_p = NULL;
4834 return GS_ALL_DONE;
4837 /* Gimplify a comparison between two variable-sized objects. Do this
4838 with a call to BUILT_IN_MEMCMP. */
4840 static enum gimplify_status
4841 gimplify_variable_sized_compare (tree *expr_p)
4843 location_t loc = EXPR_LOCATION (*expr_p);
4844 tree op0 = TREE_OPERAND (*expr_p, 0);
4845 tree op1 = TREE_OPERAND (*expr_p, 1);
4846 tree t, arg, dest, src, expr;
4848 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4849 arg = unshare_expr (arg);
4850 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4851 src = build_fold_addr_expr_loc (loc, op1);
4852 dest = build_fold_addr_expr_loc (loc, op0);
4853 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4854 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4856 expr
4857 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4858 SET_EXPR_LOCATION (expr, loc);
4859 *expr_p = expr;
4861 return GS_OK;
4864 /* Gimplify a comparison between two aggregate objects of integral scalar
4865 mode as a comparison between the bitwise equivalent scalar values. */
4867 static enum gimplify_status
4868 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4870 location_t loc = EXPR_LOCATION (*expr_p);
4871 tree op0 = TREE_OPERAND (*expr_p, 0);
4872 tree op1 = TREE_OPERAND (*expr_p, 1);
4874 tree type = TREE_TYPE (op0);
4875 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4877 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4878 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4880 *expr_p
4881 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4883 return GS_OK;
4886 /* Gimplify an expression sequence. This function gimplifies each
4887 expression and rewrites the original expression with the last
4888 expression of the sequence in GIMPLE form.
4890 PRE_P points to the list where the side effects for all the
4891 expressions in the sequence will be emitted.
4893 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4895 static enum gimplify_status
4896 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4898 tree t = *expr_p;
4902 tree *sub_p = &TREE_OPERAND (t, 0);
4904 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4905 gimplify_compound_expr (sub_p, pre_p, false);
4906 else
4907 gimplify_stmt (sub_p, pre_p);
4909 t = TREE_OPERAND (t, 1);
4911 while (TREE_CODE (t) == COMPOUND_EXPR);
4913 *expr_p = t;
4914 if (want_value)
4915 return GS_OK;
4916 else
4918 gimplify_stmt (expr_p, pre_p);
4919 return GS_ALL_DONE;
4923 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4924 gimplify. After gimplification, EXPR_P will point to a new temporary
4925 that holds the original value of the SAVE_EXPR node.
4927 PRE_P points to the list where side effects that must happen before
4928 *EXPR_P should be stored. */
4930 static enum gimplify_status
4931 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4933 enum gimplify_status ret = GS_ALL_DONE;
4934 tree val;
4936 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4937 val = TREE_OPERAND (*expr_p, 0);
4939 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4940 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4942 /* The operand may be a void-valued expression such as SAVE_EXPRs
4943 generated by the Java frontend for class initialization. It is
4944 being executed only for its side-effects. */
4945 if (TREE_TYPE (val) == void_type_node)
4947 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4948 is_gimple_stmt, fb_none);
4949 val = NULL;
4951 else
4952 val = get_initialized_tmp_var (val, pre_p, post_p);
4954 TREE_OPERAND (*expr_p, 0) = val;
4955 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4958 *expr_p = val;
4960 return ret;
4963 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
4965 unary_expr
4966 : ...
4967 | '&' varname
4970 PRE_P points to the list where side effects that must happen before
4971 *EXPR_P should be stored.
4973 POST_P points to the list where side effects that must happen after
4974 *EXPR_P should be stored. */
4976 static enum gimplify_status
4977 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4979 tree expr = *expr_p;
4980 tree op0 = TREE_OPERAND (expr, 0);
4981 enum gimplify_status ret;
4982 location_t loc = EXPR_LOCATION (*expr_p);
4984 switch (TREE_CODE (op0))
4986 case INDIRECT_REF:
4987 do_indirect_ref:
4988 /* Check if we are dealing with an expression of the form '&*ptr'.
4989 While the front end folds away '&*ptr' into 'ptr', these
4990 expressions may be generated internally by the compiler (e.g.,
4991 builtins like __builtin_va_end). */
4992 /* Caution: the silent array decomposition semantics we allow for
4993 ADDR_EXPR means we can't always discard the pair. */
4994 /* Gimplification of the ADDR_EXPR operand may drop
4995 cv-qualification conversions, so make sure we add them if
4996 needed. */
4998 tree op00 = TREE_OPERAND (op0, 0);
4999 tree t_expr = TREE_TYPE (expr);
5000 tree t_op00 = TREE_TYPE (op00);
5002 if (!useless_type_conversion_p (t_expr, t_op00))
5003 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5004 *expr_p = op00;
5005 ret = GS_OK;
5007 break;
5009 case VIEW_CONVERT_EXPR:
5010 /* Take the address of our operand and then convert it to the type of
5011 this ADDR_EXPR.
5013 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5014 all clear. The impact of this transformation is even less clear. */
5016 /* If the operand is a useless conversion, look through it. Doing so
5017 guarantees that the ADDR_EXPR and its operand will remain of the
5018 same type. */
5019 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5020 op0 = TREE_OPERAND (op0, 0);
5022 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5023 build_fold_addr_expr_loc (loc,
5024 TREE_OPERAND (op0, 0)));
5025 ret = GS_OK;
5026 break;
5028 case MEM_REF:
5029 if (integer_zerop (TREE_OPERAND (op0, 1)))
5030 goto do_indirect_ref;
5032 /* ... fall through ... */
5034 default:
5035 /* If we see a call to a declared builtin or see its address
5036 being taken (we can unify those cases here) then we can mark
5037 the builtin for implicit generation by GCC. */
5038 if (TREE_CODE (op0) == FUNCTION_DECL
5039 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5040 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5041 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5043 /* We use fb_either here because the C frontend sometimes takes
5044 the address of a call that returns a struct; see
5045 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5046 the implied temporary explicit. */
5048 /* Make the operand addressable. */
5049 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5050 is_gimple_addressable, fb_either);
5051 if (ret == GS_ERROR)
5052 break;
5054 /* Then mark it. Beware that it may not be possible to do so directly
5055 if a temporary has been created by the gimplification. */
5056 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5058 op0 = TREE_OPERAND (expr, 0);
5060 /* For various reasons, the gimplification of the expression
5061 may have made a new INDIRECT_REF. */
5062 if (TREE_CODE (op0) == INDIRECT_REF)
5063 goto do_indirect_ref;
5065 mark_addressable (TREE_OPERAND (expr, 0));
5067 /* The FEs may end up building ADDR_EXPRs early on a decl with
5068 an incomplete type. Re-build ADDR_EXPRs in canonical form
5069 here. */
5070 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5071 *expr_p = build_fold_addr_expr (op0);
5073 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5074 recompute_tree_invariant_for_addr_expr (*expr_p);
5076 /* If we re-built the ADDR_EXPR add a conversion to the original type
5077 if required. */
5078 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5079 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5081 break;
5084 return ret;
5087 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5088 value; output operands should be a gimple lvalue. */
5090 static enum gimplify_status
5091 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5093 tree expr;
5094 int noutputs;
5095 const char **oconstraints;
5096 int i;
5097 tree link;
5098 const char *constraint;
5099 bool allows_mem, allows_reg, is_inout;
5100 enum gimplify_status ret, tret;
5101 gasm *stmt;
5102 vec<tree, va_gc> *inputs;
5103 vec<tree, va_gc> *outputs;
5104 vec<tree, va_gc> *clobbers;
5105 vec<tree, va_gc> *labels;
5106 tree link_next;
5108 expr = *expr_p;
5109 noutputs = list_length (ASM_OUTPUTS (expr));
5110 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5112 inputs = NULL;
5113 outputs = NULL;
5114 clobbers = NULL;
5115 labels = NULL;
5117 ret = GS_ALL_DONE;
5118 link_next = NULL_TREE;
5119 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5121 bool ok;
5122 size_t constraint_len;
5124 link_next = TREE_CHAIN (link);
5126 oconstraints[i]
5127 = constraint
5128 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5129 constraint_len = strlen (constraint);
5130 if (constraint_len == 0)
5131 continue;
5133 ok = parse_output_constraint (&constraint, i, 0, 0,
5134 &allows_mem, &allows_reg, &is_inout);
5135 if (!ok)
5137 ret = GS_ERROR;
5138 is_inout = false;
5141 if (!allows_reg && allows_mem)
5142 mark_addressable (TREE_VALUE (link));
5144 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5145 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5146 fb_lvalue | fb_mayfail);
5147 if (tret == GS_ERROR)
5149 error ("invalid lvalue in asm output %d", i);
5150 ret = tret;
5153 vec_safe_push (outputs, link);
5154 TREE_CHAIN (link) = NULL_TREE;
5156 if (is_inout)
5158 /* An input/output operand. To give the optimizers more
5159 flexibility, split it into separate input and output
5160 operands. */
5161 tree input;
5162 char buf[10];
5164 /* Turn the in/out constraint into an output constraint. */
5165 char *p = xstrdup (constraint);
5166 p[0] = '=';
5167 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5169 /* And add a matching input constraint. */
5170 if (allows_reg)
5172 sprintf (buf, "%d", i);
5174 /* If there are multiple alternatives in the constraint,
5175 handle each of them individually. Those that allow register
5176 will be replaced with operand number, the others will stay
5177 unchanged. */
5178 if (strchr (p, ',') != NULL)
5180 size_t len = 0, buflen = strlen (buf);
5181 char *beg, *end, *str, *dst;
5183 for (beg = p + 1;;)
5185 end = strchr (beg, ',');
5186 if (end == NULL)
5187 end = strchr (beg, '\0');
5188 if ((size_t) (end - beg) < buflen)
5189 len += buflen + 1;
5190 else
5191 len += end - beg + 1;
5192 if (*end)
5193 beg = end + 1;
5194 else
5195 break;
5198 str = (char *) alloca (len);
5199 for (beg = p + 1, dst = str;;)
5201 const char *tem;
5202 bool mem_p, reg_p, inout_p;
5204 end = strchr (beg, ',');
5205 if (end)
5206 *end = '\0';
5207 beg[-1] = '=';
5208 tem = beg - 1;
5209 parse_output_constraint (&tem, i, 0, 0,
5210 &mem_p, &reg_p, &inout_p);
5211 if (dst != str)
5212 *dst++ = ',';
5213 if (reg_p)
5215 memcpy (dst, buf, buflen);
5216 dst += buflen;
5218 else
5220 if (end)
5221 len = end - beg;
5222 else
5223 len = strlen (beg);
5224 memcpy (dst, beg, len);
5225 dst += len;
5227 if (end)
5228 beg = end + 1;
5229 else
5230 break;
5232 *dst = '\0';
5233 input = build_string (dst - str, str);
5235 else
5236 input = build_string (strlen (buf), buf);
5238 else
5239 input = build_string (constraint_len - 1, constraint + 1);
5241 free (p);
5243 input = build_tree_list (build_tree_list (NULL_TREE, input),
5244 unshare_expr (TREE_VALUE (link)));
5245 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5249 link_next = NULL_TREE;
5250 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5252 link_next = TREE_CHAIN (link);
5253 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5254 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5255 oconstraints, &allows_mem, &allows_reg);
5257 /* If we can't make copies, we can only accept memory. */
5258 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5260 if (allows_mem)
5261 allows_reg = 0;
5262 else
5264 error ("impossible constraint in %<asm%>");
5265 error ("non-memory input %d must stay in memory", i);
5266 return GS_ERROR;
5270 /* If the operand is a memory input, it should be an lvalue. */
5271 if (!allows_reg && allows_mem)
5273 tree inputv = TREE_VALUE (link);
5274 STRIP_NOPS (inputv);
5275 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5276 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5277 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5278 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
5279 || TREE_CODE (inputv) == MODIFY_EXPR)
5280 TREE_VALUE (link) = error_mark_node;
5281 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5282 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5283 mark_addressable (TREE_VALUE (link));
5284 if (tret == GS_ERROR)
5286 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5287 input_location = EXPR_LOCATION (TREE_VALUE (link));
5288 error ("memory input %d is not directly addressable", i);
5289 ret = tret;
5292 else
5294 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5295 is_gimple_asm_val, fb_rvalue);
5296 if (tret == GS_ERROR)
5297 ret = tret;
5300 TREE_CHAIN (link) = NULL_TREE;
5301 vec_safe_push (inputs, link);
5304 link_next = NULL_TREE;
5305 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5307 link_next = TREE_CHAIN (link);
5308 TREE_CHAIN (link) = NULL_TREE;
5309 vec_safe_push (clobbers, link);
5312 link_next = NULL_TREE;
5313 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5315 link_next = TREE_CHAIN (link);
5316 TREE_CHAIN (link) = NULL_TREE;
5317 vec_safe_push (labels, link);
5320 /* Do not add ASMs with errors to the gimple IL stream. */
5321 if (ret != GS_ERROR)
5323 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5324 inputs, outputs, clobbers, labels);
5326 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
5327 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5329 gimplify_seq_add_stmt (pre_p, stmt);
5332 return ret;
5335 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5336 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5337 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5338 return to this function.
5340 FIXME should we complexify the prequeue handling instead? Or use flags
5341 for all the cleanups and let the optimizer tighten them up? The current
5342 code seems pretty fragile; it will break on a cleanup within any
5343 non-conditional nesting. But any such nesting would be broken, anyway;
5344 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5345 and continues out of it. We can do that at the RTL level, though, so
5346 having an optimizer to tighten up try/finally regions would be a Good
5347 Thing. */
5349 static enum gimplify_status
5350 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5352 gimple_stmt_iterator iter;
5353 gimple_seq body_sequence = NULL;
5355 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5357 /* We only care about the number of conditions between the innermost
5358 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5359 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5360 int old_conds = gimplify_ctxp->conditions;
5361 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5362 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5363 gimplify_ctxp->conditions = 0;
5364 gimplify_ctxp->conditional_cleanups = NULL;
5365 gimplify_ctxp->in_cleanup_point_expr = true;
5367 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5369 gimplify_ctxp->conditions = old_conds;
5370 gimplify_ctxp->conditional_cleanups = old_cleanups;
5371 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
5373 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5375 gimple *wce = gsi_stmt (iter);
5377 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5379 if (gsi_one_before_end_p (iter))
5381 /* Note that gsi_insert_seq_before and gsi_remove do not
5382 scan operands, unlike some other sequence mutators. */
5383 if (!gimple_wce_cleanup_eh_only (wce))
5384 gsi_insert_seq_before_without_update (&iter,
5385 gimple_wce_cleanup (wce),
5386 GSI_SAME_STMT);
5387 gsi_remove (&iter, true);
5388 break;
5390 else
5392 gtry *gtry;
5393 gimple_seq seq;
5394 enum gimple_try_flags kind;
5396 if (gimple_wce_cleanup_eh_only (wce))
5397 kind = GIMPLE_TRY_CATCH;
5398 else
5399 kind = GIMPLE_TRY_FINALLY;
5400 seq = gsi_split_seq_after (iter);
5402 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5403 /* Do not use gsi_replace here, as it may scan operands.
5404 We want to do a simple structural modification only. */
5405 gsi_set_stmt (&iter, gtry);
5406 iter = gsi_start (gtry->eval);
5409 else
5410 gsi_next (&iter);
5413 gimplify_seq_add_seq (pre_p, body_sequence);
5414 if (temp)
5416 *expr_p = temp;
5417 return GS_OK;
5419 else
5421 *expr_p = NULL;
5422 return GS_ALL_DONE;
5426 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5427 is the cleanup action required. EH_ONLY is true if the cleanup should
5428 only be executed if an exception is thrown, not on normal exit. */
5430 static void
5431 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5433 gimple *wce;
5434 gimple_seq cleanup_stmts = NULL;
5436 /* Errors can result in improperly nested cleanups. Which results in
5437 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5438 if (seen_error ())
5439 return;
5441 if (gimple_conditional_context ())
5443 /* If we're in a conditional context, this is more complex. We only
5444 want to run the cleanup if we actually ran the initialization that
5445 necessitates it, but we want to run it after the end of the
5446 conditional context. So we wrap the try/finally around the
5447 condition and use a flag to determine whether or not to actually
5448 run the destructor. Thus
5450 test ? f(A()) : 0
5452 becomes (approximately)
5454 flag = 0;
5455 try {
5456 if (test) { A::A(temp); flag = 1; val = f(temp); }
5457 else { val = 0; }
5458 } finally {
5459 if (flag) A::~A(temp);
5463 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5464 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
5465 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
5467 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5468 gimplify_stmt (&cleanup, &cleanup_stmts);
5469 wce = gimple_build_wce (cleanup_stmts);
5471 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5472 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5473 gimplify_seq_add_stmt (pre_p, ftrue);
5475 /* Because of this manipulation, and the EH edges that jump
5476 threading cannot redirect, the temporary (VAR) will appear
5477 to be used uninitialized. Don't warn. */
5478 TREE_NO_WARNING (var) = 1;
5480 else
5482 gimplify_stmt (&cleanup, &cleanup_stmts);
5483 wce = gimple_build_wce (cleanup_stmts);
5484 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5485 gimplify_seq_add_stmt (pre_p, wce);
5489 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5491 static enum gimplify_status
5492 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5494 tree targ = *expr_p;
5495 tree temp = TARGET_EXPR_SLOT (targ);
5496 tree init = TARGET_EXPR_INITIAL (targ);
5497 enum gimplify_status ret;
5499 if (init)
5501 tree cleanup = NULL_TREE;
5503 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5504 to the temps list. Handle also variable length TARGET_EXPRs. */
5505 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5507 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5508 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5509 gimplify_vla_decl (temp, pre_p);
5511 else
5512 gimple_add_tmp_var (temp);
5514 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5515 expression is supposed to initialize the slot. */
5516 if (VOID_TYPE_P (TREE_TYPE (init)))
5517 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5518 else
5520 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5521 init = init_expr;
5522 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5523 init = NULL;
5524 ggc_free (init_expr);
5526 if (ret == GS_ERROR)
5528 /* PR c++/28266 Make sure this is expanded only once. */
5529 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5530 return GS_ERROR;
5532 if (init)
5533 gimplify_and_add (init, pre_p);
5535 /* If needed, push the cleanup for the temp. */
5536 if (TARGET_EXPR_CLEANUP (targ))
5538 if (CLEANUP_EH_ONLY (targ))
5539 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5540 CLEANUP_EH_ONLY (targ), pre_p);
5541 else
5542 cleanup = TARGET_EXPR_CLEANUP (targ);
5545 /* Add a clobber for the temporary going out of scope, like
5546 gimplify_bind_expr. */
5547 if (gimplify_ctxp->in_cleanup_point_expr
5548 && needs_to_live_in_memory (temp)
5549 && flag_stack_reuse == SR_ALL)
5551 tree clobber = build_constructor (TREE_TYPE (temp),
5552 NULL);
5553 TREE_THIS_VOLATILE (clobber) = true;
5554 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5555 if (cleanup)
5556 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5557 clobber);
5558 else
5559 cleanup = clobber;
5562 if (cleanup)
5563 gimple_push_cleanup (temp, cleanup, false, pre_p);
5565 /* Only expand this once. */
5566 TREE_OPERAND (targ, 3) = init;
5567 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5569 else
5570 /* We should have expanded this before. */
5571 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5573 *expr_p = temp;
5574 return GS_OK;
5577 /* Gimplification of expression trees. */
5579 /* Gimplify an expression which appears at statement context. The
5580 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5581 NULL, a new sequence is allocated.
5583 Return true if we actually added a statement to the queue. */
5585 bool
5586 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5588 gimple_seq_node last;
5590 last = gimple_seq_last (*seq_p);
5591 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5592 return last != gimple_seq_last (*seq_p);
5595 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5596 to CTX. If entries already exist, force them to be some flavor of private.
5597 If there is no enclosing parallel, do nothing. */
5599 void
5600 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5602 splay_tree_node n;
5604 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
5605 return;
5609 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5610 if (n != NULL)
5612 if (n->value & GOVD_SHARED)
5613 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5614 else if (n->value & GOVD_MAP)
5615 n->value |= GOVD_MAP_TO_ONLY;
5616 else
5617 return;
5619 else if ((ctx->region_type & ORT_TARGET) != 0)
5621 if (ctx->target_map_scalars_firstprivate)
5622 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5623 else
5624 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5626 else if (ctx->region_type != ORT_WORKSHARE
5627 && ctx->region_type != ORT_SIMD
5628 && ctx->region_type != ORT_ACC
5629 && !(ctx->region_type & ORT_TARGET_DATA))
5630 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5632 ctx = ctx->outer_context;
5634 while (ctx);
5637 /* Similarly for each of the type sizes of TYPE. */
5639 static void
5640 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5642 if (type == NULL || type == error_mark_node)
5643 return;
5644 type = TYPE_MAIN_VARIANT (type);
5646 if (ctx->privatized_types->add (type))
5647 return;
5649 switch (TREE_CODE (type))
5651 case INTEGER_TYPE:
5652 case ENUMERAL_TYPE:
5653 case BOOLEAN_TYPE:
5654 case REAL_TYPE:
5655 case FIXED_POINT_TYPE:
5656 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5657 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5658 break;
5660 case ARRAY_TYPE:
5661 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5662 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5663 break;
5665 case RECORD_TYPE:
5666 case UNION_TYPE:
5667 case QUAL_UNION_TYPE:
5669 tree field;
5670 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5671 if (TREE_CODE (field) == FIELD_DECL)
5673 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5674 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5677 break;
5679 case POINTER_TYPE:
5680 case REFERENCE_TYPE:
5681 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5682 break;
5684 default:
5685 break;
5688 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5689 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5690 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5693 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
5695 static void
5696 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5698 splay_tree_node n;
5699 unsigned int nflags;
5700 tree t;
5702 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
5703 return;
5705 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5706 there are constructors involved somewhere. */
5707 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5708 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5709 flags |= GOVD_SEEN;
5711 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5712 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
5714 /* We shouldn't be re-adding the decl with the same data
5715 sharing class. */
5716 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5717 nflags = n->value | flags;
5718 /* The only combination of data sharing classes we should see is
5719 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
5720 reduction variables to be used in data sharing clauses. */
5721 gcc_assert ((ctx->region_type & ORT_ACC) != 0
5722 || ((nflags & GOVD_DATA_SHARE_CLASS)
5723 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
5724 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
5725 n->value = nflags;
5726 return;
5729 /* When adding a variable-sized variable, we have to handle all sorts
5730 of additional bits of data: the pointer replacement variable, and
5731 the parameters of the type. */
5732 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5734 /* Add the pointer replacement variable as PRIVATE if the variable
5735 replacement is private, else FIRSTPRIVATE since we'll need the
5736 address of the original variable either for SHARED, or for the
5737 copy into or out of the context. */
5738 if (!(flags & GOVD_LOCAL))
5740 if (flags & GOVD_MAP)
5741 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
5742 else if (flags & GOVD_PRIVATE)
5743 nflags = GOVD_PRIVATE;
5744 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
5745 && (flags & GOVD_FIRSTPRIVATE))
5746 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
5747 else
5748 nflags = GOVD_FIRSTPRIVATE;
5749 nflags |= flags & GOVD_SEEN;
5750 t = DECL_VALUE_EXPR (decl);
5751 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5752 t = TREE_OPERAND (t, 0);
5753 gcc_assert (DECL_P (t));
5754 omp_add_variable (ctx, t, nflags);
5757 /* Add all of the variable and type parameters (which should have
5758 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5759 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5760 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5761 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5763 /* The variable-sized variable itself is never SHARED, only some form
5764 of PRIVATE. The sharing would take place via the pointer variable
5765 which we remapped above. */
5766 if (flags & GOVD_SHARED)
5767 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5768 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5770 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5771 alloca statement we generate for the variable, so make sure it
5772 is available. This isn't automatically needed for the SHARED
5773 case, since we won't be allocating local storage then.
5774 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5775 in this case omp_notice_variable will be called later
5776 on when it is gimplified. */
5777 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
5778 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5779 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5781 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5782 && lang_hooks.decls.omp_privatize_by_reference (decl))
5784 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5786 /* Similar to the direct variable sized case above, we'll need the
5787 size of references being privatized. */
5788 if ((flags & GOVD_SHARED) == 0)
5790 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5791 if (DECL_P (t))
5792 omp_notice_variable (ctx, t, true);
5796 if (n != NULL)
5797 n->value |= flags;
5798 else
5799 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5802 /* Notice a threadprivate variable DECL used in OMP context CTX.
5803 This just prints out diagnostics about threadprivate variable uses
5804 in untied tasks. If DECL2 is non-NULL, prevent this warning
5805 on that variable. */
5807 static bool
5808 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5809 tree decl2)
5811 splay_tree_node n;
5812 struct gimplify_omp_ctx *octx;
5814 for (octx = ctx; octx; octx = octx->outer_context)
5815 if ((octx->region_type & ORT_TARGET) != 0)
5817 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5818 if (n == NULL)
5820 error ("threadprivate variable %qE used in target region",
5821 DECL_NAME (decl));
5822 error_at (octx->location, "enclosing target region");
5823 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5825 if (decl2)
5826 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5829 if (ctx->region_type != ORT_UNTIED_TASK)
5830 return false;
5831 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5832 if (n == NULL)
5834 error ("threadprivate variable %qE used in untied task",
5835 DECL_NAME (decl));
5836 error_at (ctx->location, "enclosing task");
5837 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5839 if (decl2)
5840 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5841 return false;
5844 /* Return true if global var DECL is device resident. */
5846 static bool
5847 device_resident_p (tree decl)
5849 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
5851 if (!attr)
5852 return false;
5854 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
5856 tree c = TREE_VALUE (t);
5857 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
5858 return true;
5861 return false;
5864 /* Determine outer default flags for DECL mentioned in an OMP region
5865 but not declared in an enclosing clause.
5867 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5868 remapped firstprivate instead of shared. To some extent this is
5869 addressed in omp_firstprivatize_type_sizes, but not
5870 effectively. */
5872 static unsigned
5873 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
5874 bool in_code, unsigned flags)
5876 enum omp_clause_default_kind default_kind = ctx->default_kind;
5877 enum omp_clause_default_kind kind;
5879 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5880 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5881 default_kind = kind;
5883 switch (default_kind)
5885 case OMP_CLAUSE_DEFAULT_NONE:
5887 const char *rtype;
5889 if (ctx->region_type & ORT_PARALLEL)
5890 rtype = "parallel";
5891 else if (ctx->region_type & ORT_TASK)
5892 rtype = "task";
5893 else if (ctx->region_type & ORT_TEAMS)
5894 rtype = "teams";
5895 else
5896 gcc_unreachable ();
5898 error ("%qE not specified in enclosing %s",
5899 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
5900 error_at (ctx->location, "enclosing %s", rtype);
5902 /* FALLTHRU */
5903 case OMP_CLAUSE_DEFAULT_SHARED:
5904 flags |= GOVD_SHARED;
5905 break;
5906 case OMP_CLAUSE_DEFAULT_PRIVATE:
5907 flags |= GOVD_PRIVATE;
5908 break;
5909 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5910 flags |= GOVD_FIRSTPRIVATE;
5911 break;
5912 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5913 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5914 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5915 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
5917 omp_notice_variable (octx, decl, in_code);
5918 for (; octx; octx = octx->outer_context)
5920 splay_tree_node n2;
5922 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5923 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
5924 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
5925 continue;
5926 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5928 flags |= GOVD_FIRSTPRIVATE;
5929 goto found_outer;
5931 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
5933 flags |= GOVD_SHARED;
5934 goto found_outer;
5939 if (TREE_CODE (decl) == PARM_DECL
5940 || (!is_global_var (decl)
5941 && DECL_CONTEXT (decl) == current_function_decl))
5942 flags |= GOVD_FIRSTPRIVATE;
5943 else
5944 flags |= GOVD_SHARED;
5945 found_outer:
5946 break;
5948 default:
5949 gcc_unreachable ();
5952 return flags;
5956 /* Determine outer default flags for DECL mentioned in an OACC region
5957 but not declared in an enclosing clause. */
5959 static unsigned
5960 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
5962 const char *rkind;
5963 bool on_device = false;
5965 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
5966 && is_global_var (decl)
5967 && device_resident_p (decl))
5969 on_device = true;
5970 flags |= GOVD_MAP_TO_ONLY;
5973 switch (ctx->region_type)
5975 default:
5976 gcc_unreachable ();
5978 case ORT_ACC_KERNELS:
5979 /* Everything under kernels are default 'present_or_copy'. */
5980 flags |= GOVD_MAP;
5981 rkind = "kernels";
5982 break;
5984 case ORT_ACC_PARALLEL:
5986 tree type = TREE_TYPE (decl);
5988 if (TREE_CODE (type) == REFERENCE_TYPE
5989 || POINTER_TYPE_P (type))
5990 type = TREE_TYPE (type);
5992 if (on_device || AGGREGATE_TYPE_P (type))
5993 /* Aggregates default to 'present_or_copy'. */
5994 flags |= GOVD_MAP;
5995 else
5996 /* Scalars default to 'firstprivate'. */
5997 flags |= GOVD_FIRSTPRIVATE;
5998 rkind = "parallel";
6000 break;
6003 if (DECL_ARTIFICIAL (decl))
6004 ; /* We can get compiler-generated decls, and should not complain
6005 about them. */
6006 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
6008 error ("%qE not specified in enclosing OpenACC %qs construct",
6009 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
6010 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
6012 else
6013 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
6015 return flags;
6018 /* Record the fact that DECL was used within the OMP context CTX.
6019 IN_CODE is true when real code uses DECL, and false when we should
6020 merely emit default(none) errors. Return true if DECL is going to
6021 be remapped and thus DECL shouldn't be gimplified into its
6022 DECL_VALUE_EXPR (if any). */
6024 static bool
6025 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
6027 splay_tree_node n;
6028 unsigned flags = in_code ? GOVD_SEEN : 0;
6029 bool ret = false, shared;
6031 if (error_operand_p (decl))
6032 return false;
6034 if (ctx->region_type == ORT_NONE)
6035 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
6037 /* Threadprivate variables are predetermined. */
6038 if (is_global_var (decl))
6040 if (DECL_THREAD_LOCAL_P (decl))
6041 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
6043 if (DECL_HAS_VALUE_EXPR_P (decl))
6045 tree value = get_base_address (DECL_VALUE_EXPR (decl));
6047 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
6048 return omp_notice_threadprivate_variable (ctx, decl, value);
6052 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6053 if ((ctx->region_type & ORT_TARGET) != 0)
6055 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
6056 if (n == NULL)
6058 unsigned nflags = flags;
6059 if (ctx->target_map_pointers_as_0len_arrays
6060 || ctx->target_map_scalars_firstprivate)
6062 bool is_declare_target = false;
6063 bool is_scalar = false;
6064 if (is_global_var (decl)
6065 && varpool_node::get_create (decl)->offloadable)
6067 struct gimplify_omp_ctx *octx;
6068 for (octx = ctx->outer_context;
6069 octx; octx = octx->outer_context)
6071 n = splay_tree_lookup (octx->variables,
6072 (splay_tree_key)decl);
6073 if (n
6074 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
6075 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6076 break;
6078 is_declare_target = octx == NULL;
6080 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
6082 tree type = TREE_TYPE (decl);
6083 if (TREE_CODE (type) == REFERENCE_TYPE)
6084 type = TREE_TYPE (type);
6085 if (TREE_CODE (type) == COMPLEX_TYPE)
6086 type = TREE_TYPE (type);
6087 if (INTEGRAL_TYPE_P (type)
6088 || SCALAR_FLOAT_TYPE_P (type)
6089 || TREE_CODE (type) == POINTER_TYPE)
6090 is_scalar = true;
6092 if (is_declare_target)
6094 else if (ctx->target_map_pointers_as_0len_arrays
6095 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
6096 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
6097 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
6098 == POINTER_TYPE)))
6099 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
6100 else if (is_scalar)
6101 nflags |= GOVD_FIRSTPRIVATE;
6104 struct gimplify_omp_ctx *octx = ctx->outer_context;
6105 if ((ctx->region_type & ORT_ACC) && octx)
6107 /* Look in outer OpenACC contexts, to see if there's a
6108 data attribute for this variable. */
6109 omp_notice_variable (octx, decl, in_code);
6111 for (; octx; octx = octx->outer_context)
6113 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
6114 break;
6115 splay_tree_node n2
6116 = splay_tree_lookup (octx->variables,
6117 (splay_tree_key) decl);
6118 if (n2)
6120 nflags |= GOVD_MAP;
6121 goto found_outer;
6127 tree type = TREE_TYPE (decl);
6129 if (nflags == flags
6130 && gimplify_omp_ctxp->target_firstprivatize_array_bases
6131 && lang_hooks.decls.omp_privatize_by_reference (decl))
6132 type = TREE_TYPE (type);
6133 if (nflags == flags
6134 && !lang_hooks.types.omp_mappable_type (type))
6136 error ("%qD referenced in target region does not have "
6137 "a mappable type", decl);
6138 nflags |= GOVD_MAP | GOVD_EXPLICIT;
6140 else if (nflags == flags)
6142 if ((ctx->region_type & ORT_ACC) != 0)
6143 nflags = oacc_default_clause (ctx, decl, flags);
6144 else
6145 nflags |= GOVD_MAP;
6148 found_outer:
6149 omp_add_variable (ctx, decl, nflags);
6151 else
6153 /* If nothing changed, there's nothing left to do. */
6154 if ((n->value & flags) == flags)
6155 return ret;
6156 n->value |= flags;
6158 goto do_outer;
6161 if (n == NULL)
6163 if (ctx->region_type == ORT_WORKSHARE
6164 || ctx->region_type == ORT_SIMD
6165 || ctx->region_type == ORT_ACC
6166 || (ctx->region_type & ORT_TARGET_DATA) != 0)
6167 goto do_outer;
6169 flags = omp_default_clause (ctx, decl, in_code, flags);
6171 if ((flags & GOVD_PRIVATE)
6172 && lang_hooks.decls.omp_private_outer_ref (decl))
6173 flags |= GOVD_PRIVATE_OUTER_REF;
6175 omp_add_variable (ctx, decl, flags);
6177 shared = (flags & GOVD_SHARED) != 0;
6178 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6179 goto do_outer;
6182 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
6183 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
6184 && DECL_SIZE (decl)
6185 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6187 splay_tree_node n2;
6188 tree t = DECL_VALUE_EXPR (decl);
6189 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6190 t = TREE_OPERAND (t, 0);
6191 gcc_assert (DECL_P (t));
6192 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6193 n2->value |= GOVD_SEEN;
6196 shared = ((flags | n->value) & GOVD_SHARED) != 0;
6197 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6199 /* If nothing changed, there's nothing left to do. */
6200 if ((n->value & flags) == flags)
6201 return ret;
6202 flags |= n->value;
6203 n->value = flags;
6205 do_outer:
6206 /* If the variable is private in the current context, then we don't
6207 need to propagate anything to an outer context. */
6208 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
6209 return ret;
6210 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6211 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6212 return ret;
6213 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6214 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6215 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6216 return ret;
6217 if (ctx->outer_context
6218 && omp_notice_variable (ctx->outer_context, decl, in_code))
6219 return true;
6220 return ret;
6223 /* Verify that DECL is private within CTX. If there's specific information
6224 to the contrary in the innermost scope, generate an error. */
6226 static bool
6227 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
6229 splay_tree_node n;
6231 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6232 if (n != NULL)
6234 if (n->value & GOVD_SHARED)
6236 if (ctx == gimplify_omp_ctxp)
6238 if (simd)
6239 error ("iteration variable %qE is predetermined linear",
6240 DECL_NAME (decl));
6241 else
6242 error ("iteration variable %qE should be private",
6243 DECL_NAME (decl));
6244 n->value = GOVD_PRIVATE;
6245 return true;
6247 else
6248 return false;
6250 else if ((n->value & GOVD_EXPLICIT) != 0
6251 && (ctx == gimplify_omp_ctxp
6252 || (ctx->region_type == ORT_COMBINED_PARALLEL
6253 && gimplify_omp_ctxp->outer_context == ctx)))
6255 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
6256 error ("iteration variable %qE should not be firstprivate",
6257 DECL_NAME (decl));
6258 else if ((n->value & GOVD_REDUCTION) != 0)
6259 error ("iteration variable %qE should not be reduction",
6260 DECL_NAME (decl));
6261 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
6262 error ("iteration variable %qE should not be linear",
6263 DECL_NAME (decl));
6264 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
6265 error ("iteration variable %qE should not be lastprivate",
6266 DECL_NAME (decl));
6267 else if (simd && (n->value & GOVD_PRIVATE) != 0)
6268 error ("iteration variable %qE should not be private",
6269 DECL_NAME (decl));
6270 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
6271 error ("iteration variable %qE is predetermined linear",
6272 DECL_NAME (decl));
6274 return (ctx == gimplify_omp_ctxp
6275 || (ctx->region_type == ORT_COMBINED_PARALLEL
6276 && gimplify_omp_ctxp->outer_context == ctx));
6279 if (ctx->region_type != ORT_WORKSHARE
6280 && ctx->region_type != ORT_SIMD
6281 && ctx->region_type != ORT_ACC)
6282 return false;
6283 else if (ctx->outer_context)
6284 return omp_is_private (ctx->outer_context, decl, simd);
6285 return false;
6288 /* Return true if DECL is private within a parallel region
6289 that binds to the current construct's context or in parallel
6290 region's REDUCTION clause. */
6292 static bool
6293 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
6295 splay_tree_node n;
6299 ctx = ctx->outer_context;
6300 if (ctx == NULL)
6302 if (is_global_var (decl))
6303 return false;
6305 /* References might be private, but might be shared too,
6306 when checking for copyprivate, assume they might be
6307 private, otherwise assume they might be shared. */
6308 if (copyprivate)
6309 return true;
6311 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6312 return false;
6314 /* Treat C++ privatized non-static data members outside
6315 of the privatization the same. */
6316 if (omp_member_access_dummy_var (decl))
6317 return false;
6319 return true;
6322 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6324 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6325 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
6326 continue;
6328 if (n != NULL)
6330 if ((n->value & GOVD_LOCAL) != 0
6331 && omp_member_access_dummy_var (decl))
6332 return false;
6333 return (n->value & GOVD_SHARED) == 0;
6336 while (ctx->region_type == ORT_WORKSHARE
6337 || ctx->region_type == ORT_SIMD
6338 || ctx->region_type == ORT_ACC);
6339 return false;
6342 /* Return true if the CTX is combined with distribute and thus
6343 lastprivate can't be supported. */
6345 static bool
6346 omp_no_lastprivate (struct gimplify_omp_ctx *ctx)
6350 if (ctx->outer_context == NULL)
6351 return false;
6352 ctx = ctx->outer_context;
6353 switch (ctx->region_type)
6355 case ORT_WORKSHARE:
6356 if (!ctx->combined_loop)
6357 return false;
6358 if (ctx->distribute)
6359 return lang_GNU_Fortran ();
6360 break;
6361 case ORT_COMBINED_PARALLEL:
6362 break;
6363 case ORT_COMBINED_TEAMS:
6364 return lang_GNU_Fortran ();
6365 default:
6366 return false;
6369 while (1);
6372 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
6374 static tree
6375 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
6377 tree t = *tp;
6379 /* If this node has been visited, unmark it and keep looking. */
6380 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
6381 return t;
6383 if (IS_TYPE_OR_DECL_P (t))
6384 *walk_subtrees = 0;
6385 return NULL_TREE;
6388 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
6389 and previous omp contexts. */
6391 static void
6392 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
6393 enum omp_region_type region_type,
6394 enum tree_code code)
6396 struct gimplify_omp_ctx *ctx, *outer_ctx;
6397 tree c;
6398 hash_map<tree, tree> *struct_map_to_clause = NULL;
6399 tree *prev_list_p = NULL;
6401 ctx = new_omp_context (region_type);
6402 outer_ctx = ctx->outer_context;
6403 if (code == OMP_TARGET && !lang_GNU_Fortran ())
6405 ctx->target_map_pointers_as_0len_arrays = true;
6406 /* FIXME: For Fortran we want to set this too, when
6407 the Fortran FE is updated to OpenMP 4.5. */
6408 ctx->target_map_scalars_firstprivate = true;
6410 if (!lang_GNU_Fortran ())
6411 switch (code)
6413 case OMP_TARGET:
6414 case OMP_TARGET_DATA:
6415 case OMP_TARGET_ENTER_DATA:
6416 case OMP_TARGET_EXIT_DATA:
6417 ctx->target_firstprivatize_array_bases = true;
6418 default:
6419 break;
6422 while ((c = *list_p) != NULL)
6424 bool remove = false;
6425 bool notice_outer = true;
6426 const char *check_non_private = NULL;
6427 unsigned int flags;
6428 tree decl;
6430 switch (OMP_CLAUSE_CODE (c))
6432 case OMP_CLAUSE_PRIVATE:
6433 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
6434 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6436 flags |= GOVD_PRIVATE_OUTER_REF;
6437 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6439 else
6440 notice_outer = false;
6441 goto do_add;
6442 case OMP_CLAUSE_SHARED:
6443 flags = GOVD_SHARED | GOVD_EXPLICIT;
6444 goto do_add;
6445 case OMP_CLAUSE_FIRSTPRIVATE:
6446 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
6447 check_non_private = "firstprivate";
6448 goto do_add;
6449 case OMP_CLAUSE_LASTPRIVATE:
6450 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
6451 check_non_private = "lastprivate";
6452 decl = OMP_CLAUSE_DECL (c);
6453 if (omp_no_lastprivate (ctx))
6455 notice_outer = false;
6456 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6458 else if (error_operand_p (decl))
6459 goto do_add;
6460 else if (outer_ctx
6461 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
6462 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
6463 && splay_tree_lookup (outer_ctx->variables,
6464 (splay_tree_key) decl) == NULL)
6466 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
6467 if (outer_ctx->outer_context)
6468 omp_notice_variable (outer_ctx->outer_context, decl, true);
6470 else if (outer_ctx
6471 && (outer_ctx->region_type & ORT_TASK) != 0
6472 && outer_ctx->combined_loop
6473 && splay_tree_lookup (outer_ctx->variables,
6474 (splay_tree_key) decl) == NULL)
6476 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6477 if (outer_ctx->outer_context)
6478 omp_notice_variable (outer_ctx->outer_context, decl, true);
6480 else if (outer_ctx
6481 && (outer_ctx->region_type == ORT_WORKSHARE
6482 || outer_ctx->region_type == ORT_ACC)
6483 && outer_ctx->combined_loop
6484 && splay_tree_lookup (outer_ctx->variables,
6485 (splay_tree_key) decl) == NULL
6486 && !omp_check_private (outer_ctx, decl, false))
6488 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6489 if (outer_ctx->outer_context
6490 && (outer_ctx->outer_context->region_type
6491 == ORT_COMBINED_PARALLEL)
6492 && splay_tree_lookup (outer_ctx->outer_context->variables,
6493 (splay_tree_key) decl) == NULL)
6495 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
6496 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
6497 if (octx->outer_context)
6498 omp_notice_variable (octx->outer_context, decl, true);
6500 else if (outer_ctx->outer_context)
6501 omp_notice_variable (outer_ctx->outer_context, decl, true);
6503 goto do_add;
6504 case OMP_CLAUSE_REDUCTION:
6505 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
6506 /* OpenACC permits reductions on private variables. */
6507 if (!(region_type & ORT_ACC))
6508 check_non_private = "reduction";
6509 decl = OMP_CLAUSE_DECL (c);
6510 if (TREE_CODE (decl) == MEM_REF)
6512 tree type = TREE_TYPE (decl);
6513 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
6514 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6516 remove = true;
6517 break;
6519 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6520 if (DECL_P (v))
6522 omp_firstprivatize_variable (ctx, v);
6523 omp_notice_variable (ctx, v, true);
6525 decl = TREE_OPERAND (decl, 0);
6526 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
6528 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
6529 NULL, is_gimple_val, fb_rvalue)
6530 == GS_ERROR)
6532 remove = true;
6533 break;
6535 v = TREE_OPERAND (decl, 1);
6536 if (DECL_P (v))
6538 omp_firstprivatize_variable (ctx, v);
6539 omp_notice_variable (ctx, v, true);
6541 decl = TREE_OPERAND (decl, 0);
6543 if (TREE_CODE (decl) == ADDR_EXPR
6544 || TREE_CODE (decl) == INDIRECT_REF)
6545 decl = TREE_OPERAND (decl, 0);
6547 goto do_add_decl;
6548 case OMP_CLAUSE_LINEAR:
6549 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
6550 is_gimple_val, fb_rvalue) == GS_ERROR)
6552 remove = true;
6553 break;
6555 else
6557 if (code == OMP_SIMD
6558 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6560 struct gimplify_omp_ctx *octx = outer_ctx;
6561 if (octx
6562 && octx->region_type == ORT_WORKSHARE
6563 && octx->combined_loop
6564 && !octx->distribute)
6566 if (octx->outer_context
6567 && (octx->outer_context->region_type
6568 == ORT_COMBINED_PARALLEL))
6569 octx = octx->outer_context->outer_context;
6570 else
6571 octx = octx->outer_context;
6573 if (octx
6574 && octx->region_type == ORT_WORKSHARE
6575 && octx->combined_loop
6576 && octx->distribute
6577 && !lang_GNU_Fortran ())
6579 error_at (OMP_CLAUSE_LOCATION (c),
6580 "%<linear%> clause for variable other than "
6581 "loop iterator specified on construct "
6582 "combined with %<distribute%>");
6583 remove = true;
6584 break;
6587 /* For combined #pragma omp parallel for simd, need to put
6588 lastprivate and perhaps firstprivate too on the
6589 parallel. Similarly for #pragma omp for simd. */
6590 struct gimplify_omp_ctx *octx = outer_ctx;
6591 decl = NULL_TREE;
6592 if (omp_no_lastprivate (ctx))
6593 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6596 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6597 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6598 break;
6599 decl = OMP_CLAUSE_DECL (c);
6600 if (error_operand_p (decl))
6602 decl = NULL_TREE;
6603 break;
6605 flags = GOVD_SEEN;
6606 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6607 flags |= GOVD_FIRSTPRIVATE;
6608 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6609 flags |= GOVD_LASTPRIVATE;
6610 if (octx
6611 && octx->region_type == ORT_WORKSHARE
6612 && octx->combined_loop)
6614 if (octx->outer_context
6615 && (octx->outer_context->region_type
6616 == ORT_COMBINED_PARALLEL))
6617 octx = octx->outer_context;
6618 else if (omp_check_private (octx, decl, false))
6619 break;
6621 else if (octx
6622 && (octx->region_type & ORT_TASK) != 0
6623 && octx->combined_loop)
6625 else if (octx
6626 && octx->region_type == ORT_COMBINED_PARALLEL
6627 && ctx->region_type == ORT_WORKSHARE
6628 && octx == outer_ctx)
6629 flags = GOVD_SEEN | GOVD_SHARED;
6630 else if (octx
6631 && octx->region_type == ORT_COMBINED_TEAMS)
6632 flags = GOVD_SEEN | GOVD_SHARED;
6633 else if (octx
6634 && octx->region_type == ORT_COMBINED_TARGET)
6636 flags &= ~GOVD_LASTPRIVATE;
6637 if (flags == GOVD_SEEN)
6638 break;
6640 else
6641 break;
6642 splay_tree_node on
6643 = splay_tree_lookup (octx->variables,
6644 (splay_tree_key) decl);
6645 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
6647 octx = NULL;
6648 break;
6650 omp_add_variable (octx, decl, flags);
6651 if (octx->outer_context == NULL)
6652 break;
6653 octx = octx->outer_context;
6655 while (1);
6656 if (octx
6657 && decl
6658 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6659 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6660 omp_notice_variable (octx, decl, true);
6662 flags = GOVD_LINEAR | GOVD_EXPLICIT;
6663 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6664 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6666 notice_outer = false;
6667 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6669 goto do_add;
6671 case OMP_CLAUSE_MAP:
6672 decl = OMP_CLAUSE_DECL (c);
6673 if (error_operand_p (decl))
6674 remove = true;
6675 switch (code)
6677 case OMP_TARGET:
6678 break;
6679 case OMP_TARGET_DATA:
6680 case OMP_TARGET_ENTER_DATA:
6681 case OMP_TARGET_EXIT_DATA:
6682 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6683 || (OMP_CLAUSE_MAP_KIND (c)
6684 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6685 /* For target {,enter ,exit }data only the array slice is
6686 mapped, but not the pointer to it. */
6687 remove = true;
6688 break;
6689 default:
6690 break;
6692 if (remove)
6693 break;
6694 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6695 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6696 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6697 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6698 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6700 remove = true;
6701 break;
6703 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6704 || (OMP_CLAUSE_MAP_KIND (c)
6705 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6706 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
6708 OMP_CLAUSE_SIZE (c)
6709 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL);
6710 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
6711 GOVD_FIRSTPRIVATE | GOVD_SEEN);
6713 if (!DECL_P (decl))
6715 tree d = decl, *pd;
6716 if (TREE_CODE (d) == ARRAY_REF)
6718 while (TREE_CODE (d) == ARRAY_REF)
6719 d = TREE_OPERAND (d, 0);
6720 if (TREE_CODE (d) == COMPONENT_REF
6721 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
6722 decl = d;
6724 pd = &OMP_CLAUSE_DECL (c);
6725 if (d == decl
6726 && TREE_CODE (decl) == INDIRECT_REF
6727 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
6728 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
6729 == REFERENCE_TYPE))
6731 pd = &TREE_OPERAND (decl, 0);
6732 decl = TREE_OPERAND (decl, 0);
6734 if (TREE_CODE (decl) == COMPONENT_REF)
6736 while (TREE_CODE (decl) == COMPONENT_REF)
6737 decl = TREE_OPERAND (decl, 0);
6739 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
6740 == GS_ERROR)
6742 remove = true;
6743 break;
6745 if (DECL_P (decl))
6747 if (error_operand_p (decl))
6749 remove = true;
6750 break;
6753 if (TYPE_SIZE_UNIT (TREE_TYPE (decl)) == NULL
6754 || (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
6755 != INTEGER_CST))
6757 error_at (OMP_CLAUSE_LOCATION (c),
6758 "mapping field %qE of variable length "
6759 "structure", OMP_CLAUSE_DECL (c));
6760 remove = true;
6761 break;
6764 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
6766 /* Error recovery. */
6767 if (prev_list_p == NULL)
6769 remove = true;
6770 break;
6772 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
6774 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
6775 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
6777 remove = true;
6778 break;
6783 tree offset;
6784 HOST_WIDE_INT bitsize, bitpos;
6785 machine_mode mode;
6786 int unsignedp, reversep, volatilep = 0;
6787 tree base = OMP_CLAUSE_DECL (c);
6788 while (TREE_CODE (base) == ARRAY_REF)
6789 base = TREE_OPERAND (base, 0);
6790 if (TREE_CODE (base) == INDIRECT_REF)
6791 base = TREE_OPERAND (base, 0);
6792 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6793 &mode, &unsignedp, &reversep,
6794 &volatilep, false);
6795 gcc_assert (base == decl
6796 && (offset == NULL_TREE
6797 || TREE_CODE (offset) == INTEGER_CST));
6799 splay_tree_node n
6800 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6801 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
6802 == GOMP_MAP_ALWAYS_POINTER);
6803 if (n == NULL || (n->value & GOVD_MAP) == 0)
6805 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6806 OMP_CLAUSE_MAP);
6807 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
6808 OMP_CLAUSE_DECL (l) = decl;
6809 OMP_CLAUSE_SIZE (l) = size_int (1);
6810 if (struct_map_to_clause == NULL)
6811 struct_map_to_clause = new hash_map<tree, tree>;
6812 struct_map_to_clause->put (decl, l);
6813 if (ptr)
6815 enum gomp_map_kind mkind
6816 = code == OMP_TARGET_EXIT_DATA
6817 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
6818 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6819 OMP_CLAUSE_MAP);
6820 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
6821 OMP_CLAUSE_DECL (c2)
6822 = unshare_expr (OMP_CLAUSE_DECL (c));
6823 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
6824 OMP_CLAUSE_SIZE (c2)
6825 = TYPE_SIZE_UNIT (ptr_type_node);
6826 OMP_CLAUSE_CHAIN (l) = c2;
6827 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
6829 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
6830 tree c3
6831 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6832 OMP_CLAUSE_MAP);
6833 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
6834 OMP_CLAUSE_DECL (c3)
6835 = unshare_expr (OMP_CLAUSE_DECL (c4));
6836 OMP_CLAUSE_SIZE (c3)
6837 = TYPE_SIZE_UNIT (ptr_type_node);
6838 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
6839 OMP_CLAUSE_CHAIN (c2) = c3;
6841 *prev_list_p = l;
6842 prev_list_p = NULL;
6844 else
6846 OMP_CLAUSE_CHAIN (l) = c;
6847 *list_p = l;
6848 list_p = &OMP_CLAUSE_CHAIN (l);
6850 flags = GOVD_MAP | GOVD_EXPLICIT;
6851 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
6852 flags |= GOVD_SEEN;
6853 goto do_add_decl;
6855 else
6857 tree *osc = struct_map_to_clause->get (decl);
6858 tree *sc = NULL, *scp = NULL;
6859 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
6860 n->value |= GOVD_SEEN;
6861 offset_int o1, o2;
6862 if (offset)
6863 o1 = wi::to_offset (offset);
6864 else
6865 o1 = 0;
6866 if (bitpos)
6867 o1 = o1 + bitpos / BITS_PER_UNIT;
6868 for (sc = &OMP_CLAUSE_CHAIN (*osc);
6869 *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
6870 if (ptr && sc == prev_list_p)
6871 break;
6872 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
6873 != COMPONENT_REF
6874 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
6875 != INDIRECT_REF)
6876 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
6877 != ARRAY_REF))
6878 break;
6879 else
6881 tree offset2;
6882 HOST_WIDE_INT bitsize2, bitpos2;
6883 base = OMP_CLAUSE_DECL (*sc);
6884 if (TREE_CODE (base) == ARRAY_REF)
6886 while (TREE_CODE (base) == ARRAY_REF)
6887 base = TREE_OPERAND (base, 0);
6888 if (TREE_CODE (base) != COMPONENT_REF
6889 || (TREE_CODE (TREE_TYPE (base))
6890 != ARRAY_TYPE))
6891 break;
6893 else if (TREE_CODE (base) == INDIRECT_REF
6894 && (TREE_CODE (TREE_OPERAND (base, 0))
6895 == COMPONENT_REF)
6896 && (TREE_CODE (TREE_TYPE
6897 (TREE_OPERAND (base, 0)))
6898 == REFERENCE_TYPE))
6899 base = TREE_OPERAND (base, 0);
6900 base = get_inner_reference (base, &bitsize2,
6901 &bitpos2, &offset2,
6902 &mode, &unsignedp,
6903 &reversep, &volatilep,
6904 false);
6905 if (base != decl)
6906 break;
6907 if (scp)
6908 continue;
6909 gcc_assert (offset == NULL_TREE
6910 || TREE_CODE (offset) == INTEGER_CST);
6911 tree d1 = OMP_CLAUSE_DECL (*sc);
6912 tree d2 = OMP_CLAUSE_DECL (c);
6913 while (TREE_CODE (d1) == ARRAY_REF)
6914 d1 = TREE_OPERAND (d1, 0);
6915 while (TREE_CODE (d2) == ARRAY_REF)
6916 d2 = TREE_OPERAND (d2, 0);
6917 if (TREE_CODE (d1) == INDIRECT_REF)
6918 d1 = TREE_OPERAND (d1, 0);
6919 if (TREE_CODE (d2) == INDIRECT_REF)
6920 d2 = TREE_OPERAND (d2, 0);
6921 while (TREE_CODE (d1) == COMPONENT_REF)
6922 if (TREE_CODE (d2) == COMPONENT_REF
6923 && TREE_OPERAND (d1, 1)
6924 == TREE_OPERAND (d2, 1))
6926 d1 = TREE_OPERAND (d1, 0);
6927 d2 = TREE_OPERAND (d2, 0);
6929 else
6930 break;
6931 if (d1 == d2)
6933 error_at (OMP_CLAUSE_LOCATION (c),
6934 "%qE appears more than once in map "
6935 "clauses", OMP_CLAUSE_DECL (c));
6936 remove = true;
6937 break;
6939 if (offset2)
6940 o2 = wi::to_offset (offset2);
6941 else
6942 o2 = 0;
6943 if (bitpos2)
6944 o2 = o2 + bitpos2 / BITS_PER_UNIT;
6945 if (wi::ltu_p (o1, o2)
6946 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
6948 if (ptr)
6949 scp = sc;
6950 else
6951 break;
6954 if (remove)
6955 break;
6956 OMP_CLAUSE_SIZE (*osc)
6957 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
6958 size_one_node);
6959 if (ptr)
6961 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6962 OMP_CLAUSE_MAP);
6963 tree cl = NULL_TREE;
6964 enum gomp_map_kind mkind
6965 = code == OMP_TARGET_EXIT_DATA
6966 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
6967 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
6968 OMP_CLAUSE_DECL (c2)
6969 = unshare_expr (OMP_CLAUSE_DECL (c));
6970 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
6971 OMP_CLAUSE_SIZE (c2)
6972 = TYPE_SIZE_UNIT (ptr_type_node);
6973 cl = scp ? *prev_list_p : c2;
6974 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
6976 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
6977 tree c3
6978 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6979 OMP_CLAUSE_MAP);
6980 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
6981 OMP_CLAUSE_DECL (c3)
6982 = unshare_expr (OMP_CLAUSE_DECL (c4));
6983 OMP_CLAUSE_SIZE (c3)
6984 = TYPE_SIZE_UNIT (ptr_type_node);
6985 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
6986 if (!scp)
6987 OMP_CLAUSE_CHAIN (c2) = c3;
6988 else
6989 cl = c3;
6991 if (scp)
6992 *scp = c2;
6993 if (sc == prev_list_p)
6995 *sc = cl;
6996 prev_list_p = NULL;
6998 else
7000 *prev_list_p = OMP_CLAUSE_CHAIN (c);
7001 list_p = prev_list_p;
7002 prev_list_p = NULL;
7003 OMP_CLAUSE_CHAIN (c) = *sc;
7004 *sc = cl;
7005 continue;
7008 else if (*sc != c)
7010 *list_p = OMP_CLAUSE_CHAIN (c);
7011 OMP_CLAUSE_CHAIN (c) = *sc;
7012 *sc = c;
7013 continue;
7017 if (!remove
7018 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
7019 && OMP_CLAUSE_CHAIN (c)
7020 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
7021 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
7022 == GOMP_MAP_ALWAYS_POINTER))
7023 prev_list_p = list_p;
7024 break;
7026 flags = GOVD_MAP | GOVD_EXPLICIT;
7027 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
7028 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
7029 flags |= GOVD_MAP_ALWAYS_TO;
7030 goto do_add;
7032 case OMP_CLAUSE_DEPEND:
7033 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
7034 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
7036 /* Nothing to do. OMP_CLAUSE_DECL will be lowered in
7037 omp-low.c. */
7038 break;
7040 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
7042 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
7043 NULL, is_gimple_val, fb_rvalue);
7044 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
7046 if (error_operand_p (OMP_CLAUSE_DECL (c)))
7048 remove = true;
7049 break;
7051 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
7052 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
7053 is_gimple_val, fb_rvalue) == GS_ERROR)
7055 remove = true;
7056 break;
7058 break;
7060 case OMP_CLAUSE_TO:
7061 case OMP_CLAUSE_FROM:
7062 case OMP_CLAUSE__CACHE_:
7063 decl = OMP_CLAUSE_DECL (c);
7064 if (error_operand_p (decl))
7066 remove = true;
7067 break;
7069 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7070 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7071 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7072 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7073 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7075 remove = true;
7076 break;
7078 if (!DECL_P (decl))
7080 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
7081 NULL, is_gimple_lvalue, fb_lvalue)
7082 == GS_ERROR)
7084 remove = true;
7085 break;
7087 break;
7089 goto do_notice;
7091 case OMP_CLAUSE_USE_DEVICE_PTR:
7092 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7093 goto do_add;
7094 case OMP_CLAUSE_IS_DEVICE_PTR:
7095 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7096 goto do_add;
7098 do_add:
7099 decl = OMP_CLAUSE_DECL (c);
7100 do_add_decl:
7101 if (error_operand_p (decl))
7103 remove = true;
7104 break;
7106 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
7108 tree t = omp_member_access_dummy_var (decl);
7109 if (t)
7111 tree v = DECL_VALUE_EXPR (decl);
7112 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
7113 if (outer_ctx)
7114 omp_notice_variable (outer_ctx, t, true);
7117 omp_add_variable (ctx, decl, flags);
7118 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7119 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7121 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
7122 GOVD_LOCAL | GOVD_SEEN);
7123 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
7124 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
7125 find_decl_expr,
7126 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7127 NULL) == NULL_TREE)
7128 omp_add_variable (ctx,
7129 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7130 GOVD_LOCAL | GOVD_SEEN);
7131 gimplify_omp_ctxp = ctx;
7132 push_gimplify_context ();
7134 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
7135 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7137 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
7138 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
7139 pop_gimplify_context
7140 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
7141 push_gimplify_context ();
7142 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
7143 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7144 pop_gimplify_context
7145 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
7146 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
7147 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
7149 gimplify_omp_ctxp = outer_ctx;
7151 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7152 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
7154 gimplify_omp_ctxp = ctx;
7155 push_gimplify_context ();
7156 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
7158 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7159 NULL, NULL);
7160 TREE_SIDE_EFFECTS (bind) = 1;
7161 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
7162 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
7164 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
7165 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7166 pop_gimplify_context
7167 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
7168 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
7170 gimplify_omp_ctxp = outer_ctx;
7172 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7173 && OMP_CLAUSE_LINEAR_STMT (c))
7175 gimplify_omp_ctxp = ctx;
7176 push_gimplify_context ();
7177 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
7179 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7180 NULL, NULL);
7181 TREE_SIDE_EFFECTS (bind) = 1;
7182 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
7183 OMP_CLAUSE_LINEAR_STMT (c) = bind;
7185 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
7186 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7187 pop_gimplify_context
7188 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
7189 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
7191 gimplify_omp_ctxp = outer_ctx;
7193 if (notice_outer)
7194 goto do_notice;
7195 break;
7197 case OMP_CLAUSE_COPYIN:
7198 case OMP_CLAUSE_COPYPRIVATE:
7199 decl = OMP_CLAUSE_DECL (c);
7200 if (error_operand_p (decl))
7202 remove = true;
7203 break;
7205 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
7206 && !remove
7207 && !omp_check_private (ctx, decl, true))
7209 remove = true;
7210 if (is_global_var (decl))
7212 if (DECL_THREAD_LOCAL_P (decl))
7213 remove = false;
7214 else if (DECL_HAS_VALUE_EXPR_P (decl))
7216 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7218 if (value
7219 && DECL_P (value)
7220 && DECL_THREAD_LOCAL_P (value))
7221 remove = false;
7224 if (remove)
7225 error_at (OMP_CLAUSE_LOCATION (c),
7226 "copyprivate variable %qE is not threadprivate"
7227 " or private in outer context", DECL_NAME (decl));
7229 do_notice:
7230 if (outer_ctx)
7231 omp_notice_variable (outer_ctx, decl, true);
7232 if (check_non_private
7233 && region_type == ORT_WORKSHARE
7234 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7235 || decl == OMP_CLAUSE_DECL (c)
7236 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
7237 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7238 == ADDR_EXPR
7239 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7240 == POINTER_PLUS_EXPR
7241 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
7242 (OMP_CLAUSE_DECL (c), 0), 0))
7243 == ADDR_EXPR)))))
7244 && omp_check_private (ctx, decl, false))
7246 error ("%s variable %qE is private in outer context",
7247 check_non_private, DECL_NAME (decl));
7248 remove = true;
7250 break;
7252 case OMP_CLAUSE_IF:
7253 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
7254 && OMP_CLAUSE_IF_MODIFIER (c) != code)
7256 const char *p[2];
7257 for (int i = 0; i < 2; i++)
7258 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
7260 case OMP_PARALLEL: p[i] = "parallel"; break;
7261 case OMP_TASK: p[i] = "task"; break;
7262 case OMP_TASKLOOP: p[i] = "taskloop"; break;
7263 case OMP_TARGET_DATA: p[i] = "target data"; break;
7264 case OMP_TARGET: p[i] = "target"; break;
7265 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
7266 case OMP_TARGET_ENTER_DATA:
7267 p[i] = "target enter data"; break;
7268 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
7269 default: gcc_unreachable ();
7271 error_at (OMP_CLAUSE_LOCATION (c),
7272 "expected %qs %<if%> clause modifier rather than %qs",
7273 p[0], p[1]);
7274 remove = true;
7276 /* Fall through. */
7278 case OMP_CLAUSE_FINAL:
7279 OMP_CLAUSE_OPERAND (c, 0)
7280 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
7281 /* Fall through. */
7283 case OMP_CLAUSE_SCHEDULE:
7284 case OMP_CLAUSE_NUM_THREADS:
7285 case OMP_CLAUSE_NUM_TEAMS:
7286 case OMP_CLAUSE_THREAD_LIMIT:
7287 case OMP_CLAUSE_DIST_SCHEDULE:
7288 case OMP_CLAUSE_DEVICE:
7289 case OMP_CLAUSE_PRIORITY:
7290 case OMP_CLAUSE_GRAINSIZE:
7291 case OMP_CLAUSE_NUM_TASKS:
7292 case OMP_CLAUSE_HINT:
7293 case OMP_CLAUSE__CILK_FOR_COUNT_:
7294 case OMP_CLAUSE_ASYNC:
7295 case OMP_CLAUSE_WAIT:
7296 case OMP_CLAUSE_NUM_GANGS:
7297 case OMP_CLAUSE_NUM_WORKERS:
7298 case OMP_CLAUSE_VECTOR_LENGTH:
7299 case OMP_CLAUSE_WORKER:
7300 case OMP_CLAUSE_VECTOR:
7301 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7302 is_gimple_val, fb_rvalue) == GS_ERROR)
7303 remove = true;
7304 break;
7306 case OMP_CLAUSE_GANG:
7307 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7308 is_gimple_val, fb_rvalue) == GS_ERROR)
7309 remove = true;
7310 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
7311 is_gimple_val, fb_rvalue) == GS_ERROR)
7312 remove = true;
7313 break;
7315 case OMP_CLAUSE_TILE:
7316 for (tree list = OMP_CLAUSE_TILE_LIST (c); !remove && list;
7317 list = TREE_CHAIN (list))
7319 if (gimplify_expr (&TREE_VALUE (list), pre_p, NULL,
7320 is_gimple_val, fb_rvalue) == GS_ERROR)
7321 remove = true;
7323 break;
7325 case OMP_CLAUSE_DEVICE_RESIDENT:
7326 case OMP_CLAUSE_USE_DEVICE:
7327 remove = true;
7328 break;
7330 case OMP_CLAUSE_NOWAIT:
7331 case OMP_CLAUSE_ORDERED:
7332 case OMP_CLAUSE_UNTIED:
7333 case OMP_CLAUSE_COLLAPSE:
7334 case OMP_CLAUSE_AUTO:
7335 case OMP_CLAUSE_SEQ:
7336 case OMP_CLAUSE_INDEPENDENT:
7337 case OMP_CLAUSE_MERGEABLE:
7338 case OMP_CLAUSE_PROC_BIND:
7339 case OMP_CLAUSE_SAFELEN:
7340 case OMP_CLAUSE_SIMDLEN:
7341 case OMP_CLAUSE_NOGROUP:
7342 case OMP_CLAUSE_THREADS:
7343 case OMP_CLAUSE_SIMD:
7344 break;
7346 case OMP_CLAUSE_DEFAULTMAP:
7347 ctx->target_map_scalars_firstprivate = false;
7348 break;
7350 case OMP_CLAUSE_ALIGNED:
7351 decl = OMP_CLAUSE_DECL (c);
7352 if (error_operand_p (decl))
7354 remove = true;
7355 break;
7357 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
7358 is_gimple_val, fb_rvalue) == GS_ERROR)
7360 remove = true;
7361 break;
7363 if (!is_global_var (decl)
7364 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
7365 omp_add_variable (ctx, decl, GOVD_ALIGNED);
7366 break;
7368 case OMP_CLAUSE_DEFAULT:
7369 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
7370 break;
7372 default:
7373 gcc_unreachable ();
7376 if (remove)
7377 *list_p = OMP_CLAUSE_CHAIN (c);
7378 else
7379 list_p = &OMP_CLAUSE_CHAIN (c);
7382 gimplify_omp_ctxp = ctx;
7383 if (struct_map_to_clause)
7384 delete struct_map_to_clause;
7387 struct gimplify_adjust_omp_clauses_data
7389 tree *list_p;
7390 gimple_seq *pre_p;
7393 /* For all variables that were not actually used within the context,
7394 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
7396 static int
7397 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
7399 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
7400 gimple_seq *pre_p
7401 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
7402 tree decl = (tree) n->key;
7403 unsigned flags = n->value;
7404 enum omp_clause_code code;
7405 tree clause;
7406 bool private_debug;
7408 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
7409 return 0;
7410 if ((flags & GOVD_SEEN) == 0)
7411 return 0;
7412 if (flags & GOVD_DEBUG_PRIVATE)
7414 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
7415 private_debug = true;
7417 else if (flags & GOVD_MAP)
7418 private_debug = false;
7419 else
7420 private_debug
7421 = lang_hooks.decls.omp_private_debug_clause (decl,
7422 !!(flags & GOVD_SHARED));
7423 if (private_debug)
7424 code = OMP_CLAUSE_PRIVATE;
7425 else if (flags & GOVD_MAP)
7426 code = OMP_CLAUSE_MAP;
7427 else if (flags & GOVD_SHARED)
7429 if (is_global_var (decl))
7431 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
7432 while (ctx != NULL)
7434 splay_tree_node on
7435 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7436 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7437 | GOVD_PRIVATE | GOVD_REDUCTION
7438 | GOVD_LINEAR | GOVD_MAP)) != 0)
7439 break;
7440 ctx = ctx->outer_context;
7442 if (ctx == NULL)
7443 return 0;
7445 code = OMP_CLAUSE_SHARED;
7447 else if (flags & GOVD_PRIVATE)
7448 code = OMP_CLAUSE_PRIVATE;
7449 else if (flags & GOVD_FIRSTPRIVATE)
7450 code = OMP_CLAUSE_FIRSTPRIVATE;
7451 else if (flags & GOVD_LASTPRIVATE)
7452 code = OMP_CLAUSE_LASTPRIVATE;
7453 else if (flags & GOVD_ALIGNED)
7454 return 0;
7455 else
7456 gcc_unreachable ();
7458 clause = build_omp_clause (input_location, code);
7459 OMP_CLAUSE_DECL (clause) = decl;
7460 OMP_CLAUSE_CHAIN (clause) = *list_p;
7461 if (private_debug)
7462 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
7463 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
7464 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
7465 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
7467 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
7468 OMP_CLAUSE_DECL (nc) = decl;
7469 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7470 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
7471 OMP_CLAUSE_DECL (clause)
7472 = build_simple_mem_ref_loc (input_location, decl);
7473 OMP_CLAUSE_DECL (clause)
7474 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
7475 build_int_cst (build_pointer_type (char_type_node), 0));
7476 OMP_CLAUSE_SIZE (clause) = size_zero_node;
7477 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7478 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
7479 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
7480 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
7481 OMP_CLAUSE_CHAIN (nc) = *list_p;
7482 OMP_CLAUSE_CHAIN (clause) = nc;
7483 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7484 gimplify_omp_ctxp = ctx->outer_context;
7485 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
7486 pre_p, NULL, is_gimple_val, fb_rvalue);
7487 gimplify_omp_ctxp = ctx;
7489 else if (code == OMP_CLAUSE_MAP)
7491 OMP_CLAUSE_SET_MAP_KIND (clause,
7492 flags & GOVD_MAP_TO_ONLY
7493 ? GOMP_MAP_TO
7494 : GOMP_MAP_TOFROM);
7495 if (DECL_SIZE (decl)
7496 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7498 tree decl2 = DECL_VALUE_EXPR (decl);
7499 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
7500 decl2 = TREE_OPERAND (decl2, 0);
7501 gcc_assert (DECL_P (decl2));
7502 tree mem = build_simple_mem_ref (decl2);
7503 OMP_CLAUSE_DECL (clause) = mem;
7504 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7505 if (gimplify_omp_ctxp->outer_context)
7507 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
7508 omp_notice_variable (ctx, decl2, true);
7509 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
7511 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
7512 OMP_CLAUSE_MAP);
7513 OMP_CLAUSE_DECL (nc) = decl;
7514 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7515 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
7516 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
7517 else
7518 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
7519 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
7520 OMP_CLAUSE_CHAIN (clause) = nc;
7522 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7523 && lang_hooks.decls.omp_privatize_by_reference (decl))
7525 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
7526 OMP_CLAUSE_SIZE (clause)
7527 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
7528 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7529 gimplify_omp_ctxp = ctx->outer_context;
7530 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
7531 pre_p, NULL, is_gimple_val, fb_rvalue);
7532 gimplify_omp_ctxp = ctx;
7533 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
7534 OMP_CLAUSE_MAP);
7535 OMP_CLAUSE_DECL (nc) = decl;
7536 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7537 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
7538 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
7539 OMP_CLAUSE_CHAIN (clause) = nc;
7541 else
7542 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
7544 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
7546 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
7547 OMP_CLAUSE_DECL (nc) = decl;
7548 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
7549 OMP_CLAUSE_CHAIN (nc) = *list_p;
7550 OMP_CLAUSE_CHAIN (clause) = nc;
7551 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7552 gimplify_omp_ctxp = ctx->outer_context;
7553 lang_hooks.decls.omp_finish_clause (nc, pre_p);
7554 gimplify_omp_ctxp = ctx;
7556 *list_p = clause;
7557 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7558 gimplify_omp_ctxp = ctx->outer_context;
7559 lang_hooks.decls.omp_finish_clause (clause, pre_p);
7560 gimplify_omp_ctxp = ctx;
7561 return 0;
7564 static void
7565 gimplify_adjust_omp_clauses (gimple_seq *pre_p, tree *list_p,
7566 enum tree_code code)
7568 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7569 tree c, decl;
7571 while ((c = *list_p) != NULL)
7573 splay_tree_node n;
7574 bool remove = false;
7576 switch (OMP_CLAUSE_CODE (c))
7578 case OMP_CLAUSE_PRIVATE:
7579 case OMP_CLAUSE_SHARED:
7580 case OMP_CLAUSE_FIRSTPRIVATE:
7581 case OMP_CLAUSE_LINEAR:
7582 decl = OMP_CLAUSE_DECL (c);
7583 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7584 remove = !(n->value & GOVD_SEEN);
7585 if (! remove)
7587 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
7588 if ((n->value & GOVD_DEBUG_PRIVATE)
7589 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
7591 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
7592 || ((n->value & GOVD_DATA_SHARE_CLASS)
7593 == GOVD_PRIVATE));
7594 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
7595 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
7598 break;
7600 case OMP_CLAUSE_LASTPRIVATE:
7601 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
7602 accurately reflect the presence of a FIRSTPRIVATE clause. */
7603 decl = OMP_CLAUSE_DECL (c);
7604 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7605 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7606 = (n->value & GOVD_FIRSTPRIVATE) != 0;
7607 if (omp_no_lastprivate (ctx))
7609 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7610 remove = true;
7611 else
7612 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_PRIVATE;
7614 else if (code == OMP_DISTRIBUTE
7615 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7617 remove = true;
7618 error_at (OMP_CLAUSE_LOCATION (c),
7619 "same variable used in %<firstprivate%> and "
7620 "%<lastprivate%> clauses on %<distribute%> "
7621 "construct");
7623 break;
7625 case OMP_CLAUSE_ALIGNED:
7626 decl = OMP_CLAUSE_DECL (c);
7627 if (!is_global_var (decl))
7629 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7630 remove = n == NULL || !(n->value & GOVD_SEEN);
7631 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
7633 struct gimplify_omp_ctx *octx;
7634 if (n != NULL
7635 && (n->value & (GOVD_DATA_SHARE_CLASS
7636 & ~GOVD_FIRSTPRIVATE)))
7637 remove = true;
7638 else
7639 for (octx = ctx->outer_context; octx;
7640 octx = octx->outer_context)
7642 n = splay_tree_lookup (octx->variables,
7643 (splay_tree_key) decl);
7644 if (n == NULL)
7645 continue;
7646 if (n->value & GOVD_LOCAL)
7647 break;
7648 /* We have to avoid assigning a shared variable
7649 to itself when trying to add
7650 __builtin_assume_aligned. */
7651 if (n->value & GOVD_SHARED)
7653 remove = true;
7654 break;
7659 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
7661 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7662 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7663 remove = true;
7665 break;
7667 case OMP_CLAUSE_MAP:
7668 if (code == OMP_TARGET_EXIT_DATA
7669 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7671 remove = true;
7672 break;
7674 decl = OMP_CLAUSE_DECL (c);
7675 if (!DECL_P (decl))
7677 if ((ctx->region_type & ORT_TARGET) != 0
7678 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
7680 if (TREE_CODE (decl) == INDIRECT_REF
7681 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7682 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7683 == REFERENCE_TYPE))
7684 decl = TREE_OPERAND (decl, 0);
7685 if (TREE_CODE (decl) == COMPONENT_REF)
7687 while (TREE_CODE (decl) == COMPONENT_REF)
7688 decl = TREE_OPERAND (decl, 0);
7689 if (DECL_P (decl))
7691 n = splay_tree_lookup (ctx->variables,
7692 (splay_tree_key) decl);
7693 if (!(n->value & GOVD_SEEN))
7694 remove = true;
7698 break;
7700 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7701 if ((ctx->region_type & ORT_TARGET) != 0
7702 && !(n->value & GOVD_SEEN)
7703 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0)
7705 remove = true;
7706 /* For struct element mapping, if struct is never referenced
7707 in target block and none of the mapping has always modifier,
7708 remove all the struct element mappings, which immediately
7709 follow the GOMP_MAP_STRUCT map clause. */
7710 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
7712 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
7713 while (cnt--)
7714 OMP_CLAUSE_CHAIN (c)
7715 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
7718 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
7719 && code == OMP_TARGET_EXIT_DATA)
7720 remove = true;
7721 else if (DECL_SIZE (decl)
7722 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
7723 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
7724 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
7725 && (OMP_CLAUSE_MAP_KIND (c)
7726 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7728 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
7729 for these, TREE_CODE (DECL_SIZE (decl)) will always be
7730 INTEGER_CST. */
7731 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
7733 tree decl2 = DECL_VALUE_EXPR (decl);
7734 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
7735 decl2 = TREE_OPERAND (decl2, 0);
7736 gcc_assert (DECL_P (decl2));
7737 tree mem = build_simple_mem_ref (decl2);
7738 OMP_CLAUSE_DECL (c) = mem;
7739 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7740 if (ctx->outer_context)
7742 omp_notice_variable (ctx->outer_context, decl2, true);
7743 omp_notice_variable (ctx->outer_context,
7744 OMP_CLAUSE_SIZE (c), true);
7746 if (((ctx->region_type & ORT_TARGET) != 0
7747 || !ctx->target_firstprivatize_array_bases)
7748 && ((n->value & GOVD_SEEN) == 0
7749 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
7751 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7752 OMP_CLAUSE_MAP);
7753 OMP_CLAUSE_DECL (nc) = decl;
7754 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7755 if (ctx->target_firstprivatize_array_bases)
7756 OMP_CLAUSE_SET_MAP_KIND (nc,
7757 GOMP_MAP_FIRSTPRIVATE_POINTER);
7758 else
7759 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
7760 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
7761 OMP_CLAUSE_CHAIN (c) = nc;
7762 c = nc;
7765 else
7767 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7768 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
7769 gcc_assert ((n->value & GOVD_SEEN) == 0
7770 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
7771 == 0));
7773 break;
7775 case OMP_CLAUSE_TO:
7776 case OMP_CLAUSE_FROM:
7777 case OMP_CLAUSE__CACHE_:
7778 decl = OMP_CLAUSE_DECL (c);
7779 if (!DECL_P (decl))
7780 break;
7781 if (DECL_SIZE (decl)
7782 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7784 tree decl2 = DECL_VALUE_EXPR (decl);
7785 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
7786 decl2 = TREE_OPERAND (decl2, 0);
7787 gcc_assert (DECL_P (decl2));
7788 tree mem = build_simple_mem_ref (decl2);
7789 OMP_CLAUSE_DECL (c) = mem;
7790 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7791 if (ctx->outer_context)
7793 omp_notice_variable (ctx->outer_context, decl2, true);
7794 omp_notice_variable (ctx->outer_context,
7795 OMP_CLAUSE_SIZE (c), true);
7798 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7799 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
7800 break;
7802 case OMP_CLAUSE_REDUCTION:
7803 case OMP_CLAUSE_COPYIN:
7804 case OMP_CLAUSE_COPYPRIVATE:
7805 case OMP_CLAUSE_IF:
7806 case OMP_CLAUSE_NUM_THREADS:
7807 case OMP_CLAUSE_NUM_TEAMS:
7808 case OMP_CLAUSE_THREAD_LIMIT:
7809 case OMP_CLAUSE_DIST_SCHEDULE:
7810 case OMP_CLAUSE_DEVICE:
7811 case OMP_CLAUSE_SCHEDULE:
7812 case OMP_CLAUSE_NOWAIT:
7813 case OMP_CLAUSE_ORDERED:
7814 case OMP_CLAUSE_DEFAULT:
7815 case OMP_CLAUSE_UNTIED:
7816 case OMP_CLAUSE_COLLAPSE:
7817 case OMP_CLAUSE_FINAL:
7818 case OMP_CLAUSE_MERGEABLE:
7819 case OMP_CLAUSE_PROC_BIND:
7820 case OMP_CLAUSE_SAFELEN:
7821 case OMP_CLAUSE_SIMDLEN:
7822 case OMP_CLAUSE_DEPEND:
7823 case OMP_CLAUSE_PRIORITY:
7824 case OMP_CLAUSE_GRAINSIZE:
7825 case OMP_CLAUSE_NUM_TASKS:
7826 case OMP_CLAUSE_NOGROUP:
7827 case OMP_CLAUSE_THREADS:
7828 case OMP_CLAUSE_SIMD:
7829 case OMP_CLAUSE_HINT:
7830 case OMP_CLAUSE_DEFAULTMAP:
7831 case OMP_CLAUSE_USE_DEVICE_PTR:
7832 case OMP_CLAUSE_IS_DEVICE_PTR:
7833 case OMP_CLAUSE__CILK_FOR_COUNT_:
7834 case OMP_CLAUSE_ASYNC:
7835 case OMP_CLAUSE_WAIT:
7836 case OMP_CLAUSE_DEVICE_RESIDENT:
7837 case OMP_CLAUSE_USE_DEVICE:
7838 case OMP_CLAUSE_INDEPENDENT:
7839 case OMP_CLAUSE_NUM_GANGS:
7840 case OMP_CLAUSE_NUM_WORKERS:
7841 case OMP_CLAUSE_VECTOR_LENGTH:
7842 case OMP_CLAUSE_GANG:
7843 case OMP_CLAUSE_WORKER:
7844 case OMP_CLAUSE_VECTOR:
7845 case OMP_CLAUSE_AUTO:
7846 case OMP_CLAUSE_SEQ:
7847 case OMP_CLAUSE_TILE:
7848 break;
7850 default:
7851 gcc_unreachable ();
7854 if (remove)
7855 *list_p = OMP_CLAUSE_CHAIN (c);
7856 else
7857 list_p = &OMP_CLAUSE_CHAIN (c);
7860 /* Add in any implicit data sharing. */
7861 struct gimplify_adjust_omp_clauses_data data;
7862 data.list_p = list_p;
7863 data.pre_p = pre_p;
7864 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
7866 gimplify_omp_ctxp = ctx->outer_context;
7867 delete_omp_context (ctx);
7870 /* Gimplify OACC_CACHE. */
7872 static void
7873 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
7875 tree expr = *expr_p;
7877 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
7878 OACC_CACHE);
7879 gimplify_adjust_omp_clauses (pre_p, &OACC_CACHE_CLAUSES (expr), OACC_CACHE);
7881 /* TODO: Do something sensible with this information. */
7883 *expr_p = NULL_TREE;
7886 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
7887 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
7888 kind. The entry kind will replace the one in CLAUSE, while the exit
7889 kind will be used in a new omp_clause and returned to the caller. */
7891 static tree
7892 gimplify_oacc_declare_1 (tree clause)
7894 HOST_WIDE_INT kind, new_op;
7895 bool ret = false;
7896 tree c = NULL;
7898 kind = OMP_CLAUSE_MAP_KIND (clause);
7900 switch (kind)
7902 case GOMP_MAP_ALLOC:
7903 case GOMP_MAP_FORCE_ALLOC:
7904 case GOMP_MAP_FORCE_TO:
7905 new_op = GOMP_MAP_FORCE_DEALLOC;
7906 ret = true;
7907 break;
7909 case GOMP_MAP_FORCE_FROM:
7910 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
7911 new_op = GOMP_MAP_FORCE_FROM;
7912 ret = true;
7913 break;
7915 case GOMP_MAP_FORCE_TOFROM:
7916 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
7917 new_op = GOMP_MAP_FORCE_FROM;
7918 ret = true;
7919 break;
7921 case GOMP_MAP_FROM:
7922 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
7923 new_op = GOMP_MAP_FROM;
7924 ret = true;
7925 break;
7927 case GOMP_MAP_TOFROM:
7928 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
7929 new_op = GOMP_MAP_FROM;
7930 ret = true;
7931 break;
7933 case GOMP_MAP_DEVICE_RESIDENT:
7934 case GOMP_MAP_FORCE_DEVICEPTR:
7935 case GOMP_MAP_FORCE_PRESENT:
7936 case GOMP_MAP_LINK:
7937 case GOMP_MAP_POINTER:
7938 case GOMP_MAP_TO:
7939 break;
7941 default:
7942 gcc_unreachable ();
7943 break;
7946 if (ret)
7948 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
7949 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
7950 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
7953 return c;
7956 /* Gimplify OACC_DECLARE. */
7958 static void
7959 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
7961 tree expr = *expr_p;
7962 gomp_target *stmt;
7963 tree clauses, t;
7965 clauses = OACC_DECLARE_CLAUSES (expr);
7967 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
7969 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
7971 tree decl = OMP_CLAUSE_DECL (t);
7973 if (TREE_CODE (decl) == MEM_REF)
7974 continue;
7976 if (TREE_CODE (decl) == VAR_DECL
7977 && !is_global_var (decl)
7978 && DECL_CONTEXT (decl) == current_function_decl)
7980 tree c = gimplify_oacc_declare_1 (t);
7981 if (c)
7983 if (oacc_declare_returns == NULL)
7984 oacc_declare_returns = new hash_map<tree, tree>;
7986 oacc_declare_returns->put (decl, c);
7990 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
7993 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
7994 clauses);
7996 gimplify_seq_add_stmt (pre_p, stmt);
7998 *expr_p = NULL_TREE;
8001 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
8002 gimplification of the body, as well as scanning the body for used
8003 variables. We need to do this scan now, because variable-sized
8004 decls will be decomposed during gimplification. */
8006 static void
8007 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
8009 tree expr = *expr_p;
8010 gimple *g;
8011 gimple_seq body = NULL;
8013 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
8014 OMP_PARALLEL_COMBINED (expr)
8015 ? ORT_COMBINED_PARALLEL
8016 : ORT_PARALLEL, OMP_PARALLEL);
8018 push_gimplify_context ();
8020 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
8021 if (gimple_code (g) == GIMPLE_BIND)
8022 pop_gimplify_context (g);
8023 else
8024 pop_gimplify_context (NULL);
8026 gimplify_adjust_omp_clauses (pre_p, &OMP_PARALLEL_CLAUSES (expr),
8027 OMP_PARALLEL);
8029 g = gimple_build_omp_parallel (body,
8030 OMP_PARALLEL_CLAUSES (expr),
8031 NULL_TREE, NULL_TREE);
8032 if (OMP_PARALLEL_COMBINED (expr))
8033 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
8034 gimplify_seq_add_stmt (pre_p, g);
8035 *expr_p = NULL_TREE;
8038 /* Gimplify the contents of an OMP_TASK statement. This involves
8039 gimplification of the body, as well as scanning the body for used
8040 variables. We need to do this scan now, because variable-sized
8041 decls will be decomposed during gimplification. */
8043 static void
8044 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
8046 tree expr = *expr_p;
8047 gimple *g;
8048 gimple_seq body = NULL;
8050 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
8051 find_omp_clause (OMP_TASK_CLAUSES (expr),
8052 OMP_CLAUSE_UNTIED)
8053 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
8055 push_gimplify_context ();
8057 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
8058 if (gimple_code (g) == GIMPLE_BIND)
8059 pop_gimplify_context (g);
8060 else
8061 pop_gimplify_context (NULL);
8063 gimplify_adjust_omp_clauses (pre_p, &OMP_TASK_CLAUSES (expr), OMP_TASK);
8065 g = gimple_build_omp_task (body,
8066 OMP_TASK_CLAUSES (expr),
8067 NULL_TREE, NULL_TREE,
8068 NULL_TREE, NULL_TREE, NULL_TREE);
8069 gimplify_seq_add_stmt (pre_p, g);
8070 *expr_p = NULL_TREE;
8073 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
8074 with non-NULL OMP_FOR_INIT. */
8076 static tree
8077 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
8079 *walk_subtrees = 0;
8080 switch (TREE_CODE (*tp))
8082 case OMP_FOR:
8083 *walk_subtrees = 1;
8084 /* FALLTHRU */
8085 case OMP_SIMD:
8086 if (OMP_FOR_INIT (*tp) != NULL_TREE)
8087 return *tp;
8088 break;
8089 case BIND_EXPR:
8090 case STATEMENT_LIST:
8091 case OMP_PARALLEL:
8092 *walk_subtrees = 1;
8093 break;
8094 default:
8095 break;
8097 return NULL_TREE;
8100 /* Gimplify the gross structure of an OMP_FOR statement. */
8102 static enum gimplify_status
8103 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
8105 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
8106 enum gimplify_status ret = GS_ALL_DONE;
8107 enum gimplify_status tret;
8108 gomp_for *gfor;
8109 gimple_seq for_body, for_pre_body;
8110 int i;
8111 bitmap has_decl_expr = NULL;
8112 enum omp_region_type ort = ORT_WORKSHARE;
8114 orig_for_stmt = for_stmt = *expr_p;
8116 switch (TREE_CODE (for_stmt))
8118 case OMP_FOR:
8119 case CILK_FOR:
8120 case OMP_DISTRIBUTE:
8121 break;
8122 case OACC_LOOP:
8123 ort = ORT_ACC;
8124 break;
8125 case OMP_TASKLOOP:
8126 if (find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
8127 ort = ORT_UNTIED_TASK;
8128 else
8129 ort = ORT_TASK;
8130 break;
8131 case OMP_SIMD:
8132 case CILK_SIMD:
8133 ort = ORT_SIMD;
8134 break;
8135 default:
8136 gcc_unreachable ();
8139 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
8140 clause for the IV. */
8141 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8143 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
8144 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8145 decl = TREE_OPERAND (t, 0);
8146 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8147 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8148 && OMP_CLAUSE_DECL (c) == decl)
8150 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
8151 break;
8155 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
8157 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
8158 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
8159 find_combined_omp_for, NULL, NULL);
8160 if (inner_for_stmt == NULL_TREE)
8162 gcc_assert (seen_error ());
8163 *expr_p = NULL_TREE;
8164 return GS_ERROR;
8168 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
8169 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
8170 TREE_CODE (for_stmt));
8172 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
8173 gimplify_omp_ctxp->distribute = true;
8175 /* Handle OMP_FOR_INIT. */
8176 for_pre_body = NULL;
8177 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
8179 has_decl_expr = BITMAP_ALLOC (NULL);
8180 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
8181 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
8182 == VAR_DECL)
8184 t = OMP_FOR_PRE_BODY (for_stmt);
8185 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
8187 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
8189 tree_stmt_iterator si;
8190 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
8191 tsi_next (&si))
8193 t = tsi_stmt (si);
8194 if (TREE_CODE (t) == DECL_EXPR
8195 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
8196 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
8200 if (OMP_FOR_PRE_BODY (for_stmt))
8202 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
8203 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
8204 else
8206 struct gimplify_omp_ctx ctx;
8207 memset (&ctx, 0, sizeof (ctx));
8208 ctx.region_type = ORT_NONE;
8209 gimplify_omp_ctxp = &ctx;
8210 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
8211 gimplify_omp_ctxp = NULL;
8214 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
8216 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
8217 for_stmt = inner_for_stmt;
8219 /* For taskloop, need to gimplify the start, end and step before the
8220 taskloop, outside of the taskloop omp context. */
8221 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8223 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8225 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8226 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
8228 TREE_OPERAND (t, 1)
8229 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
8230 pre_p, NULL);
8231 tree c = build_omp_clause (input_location,
8232 OMP_CLAUSE_FIRSTPRIVATE);
8233 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
8234 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8235 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8238 /* Handle OMP_FOR_COND. */
8239 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
8240 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
8242 TREE_OPERAND (t, 1)
8243 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
8244 gimple_seq_empty_p (for_pre_body)
8245 ? pre_p : &for_pre_body, NULL);
8246 tree c = build_omp_clause (input_location,
8247 OMP_CLAUSE_FIRSTPRIVATE);
8248 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
8249 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8250 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8253 /* Handle OMP_FOR_INCR. */
8254 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8255 if (TREE_CODE (t) == MODIFY_EXPR)
8257 decl = TREE_OPERAND (t, 0);
8258 t = TREE_OPERAND (t, 1);
8259 tree *tp = &TREE_OPERAND (t, 1);
8260 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
8261 tp = &TREE_OPERAND (t, 0);
8263 if (!is_gimple_constant (*tp))
8265 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
8266 ? pre_p : &for_pre_body;
8267 *tp = get_initialized_tmp_var (*tp, seq, NULL);
8268 tree c = build_omp_clause (input_location,
8269 OMP_CLAUSE_FIRSTPRIVATE);
8270 OMP_CLAUSE_DECL (c) = *tp;
8271 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8272 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8277 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
8278 OMP_TASKLOOP);
8281 if (orig_for_stmt != for_stmt)
8282 gimplify_omp_ctxp->combined_loop = true;
8284 for_body = NULL;
8285 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8286 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
8287 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8288 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
8290 tree c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
8291 bool is_doacross = false;
8292 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
8294 is_doacross = true;
8295 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
8296 (OMP_FOR_INIT (for_stmt))
8297 * 2);
8299 int collapse = 1;
8300 c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
8301 if (c)
8302 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
8303 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8305 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8306 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8307 decl = TREE_OPERAND (t, 0);
8308 gcc_assert (DECL_P (decl));
8309 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
8310 || POINTER_TYPE_P (TREE_TYPE (decl)));
8311 if (is_doacross)
8313 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
8314 gimplify_omp_ctxp->loop_iter_var.quick_push
8315 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
8316 else
8317 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8318 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8321 /* Make sure the iteration variable is private. */
8322 tree c = NULL_TREE;
8323 tree c2 = NULL_TREE;
8324 if (orig_for_stmt != for_stmt)
8325 /* Do this only on innermost construct for combined ones. */;
8326 else if (ort == ORT_SIMD)
8328 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
8329 (splay_tree_key) decl);
8330 omp_is_private (gimplify_omp_ctxp, decl,
8331 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8332 != 1));
8333 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8334 omp_notice_variable (gimplify_omp_ctxp, decl, true);
8335 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8337 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
8338 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
8339 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
8340 if ((has_decl_expr
8341 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
8342 || omp_no_lastprivate (gimplify_omp_ctxp))
8344 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8345 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8347 struct gimplify_omp_ctx *outer
8348 = gimplify_omp_ctxp->outer_context;
8349 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8351 if (outer->region_type == ORT_WORKSHARE
8352 && outer->combined_loop)
8354 n = splay_tree_lookup (outer->variables,
8355 (splay_tree_key)decl);
8356 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8358 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8359 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8361 else
8363 struct gimplify_omp_ctx *octx = outer->outer_context;
8364 if (octx
8365 && octx->region_type == ORT_COMBINED_PARALLEL
8366 && octx->outer_context
8367 && (octx->outer_context->region_type
8368 == ORT_WORKSHARE)
8369 && octx->outer_context->combined_loop)
8371 octx = octx->outer_context;
8372 n = splay_tree_lookup (octx->variables,
8373 (splay_tree_key)decl);
8374 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8376 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8377 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8384 OMP_CLAUSE_DECL (c) = decl;
8385 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
8386 OMP_FOR_CLAUSES (for_stmt) = c;
8387 omp_add_variable (gimplify_omp_ctxp, decl, flags);
8388 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8390 if (outer->region_type == ORT_WORKSHARE
8391 && outer->combined_loop)
8393 if (outer->outer_context
8394 && (outer->outer_context->region_type
8395 == ORT_COMBINED_PARALLEL))
8396 outer = outer->outer_context;
8397 else if (omp_check_private (outer, decl, false))
8398 outer = NULL;
8400 else if (((outer->region_type & ORT_TASK) != 0)
8401 && outer->combined_loop
8402 && !omp_check_private (gimplify_omp_ctxp,
8403 decl, false))
8405 else if (outer->region_type != ORT_COMBINED_PARALLEL)
8406 outer = NULL;
8407 if (outer)
8409 n = splay_tree_lookup (outer->variables,
8410 (splay_tree_key)decl);
8411 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8413 omp_add_variable (outer, decl,
8414 GOVD_LASTPRIVATE | GOVD_SEEN);
8415 if (outer->region_type == ORT_COMBINED_PARALLEL
8416 && outer->outer_context
8417 && (outer->outer_context->region_type
8418 == ORT_WORKSHARE)
8419 && outer->outer_context->combined_loop)
8421 outer = outer->outer_context;
8422 n = splay_tree_lookup (outer->variables,
8423 (splay_tree_key)decl);
8424 if (omp_check_private (outer, decl, false))
8425 outer = NULL;
8426 else if (n == NULL
8427 || ((n->value & GOVD_DATA_SHARE_CLASS)
8428 == 0))
8429 omp_add_variable (outer, decl,
8430 GOVD_LASTPRIVATE
8431 | GOVD_SEEN);
8432 else
8433 outer = NULL;
8435 if (outer && outer->outer_context
8436 && (outer->outer_context->region_type
8437 == ORT_COMBINED_TEAMS))
8439 outer = outer->outer_context;
8440 n = splay_tree_lookup (outer->variables,
8441 (splay_tree_key)decl);
8442 if (n == NULL
8443 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8444 omp_add_variable (outer, decl,
8445 GOVD_SHARED | GOVD_SEEN);
8446 else
8447 outer = NULL;
8449 if (outer && outer->outer_context)
8450 omp_notice_variable (outer->outer_context, decl,
8451 true);
8456 else
8458 bool lastprivate
8459 = (!has_decl_expr
8460 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
8461 && !omp_no_lastprivate (gimplify_omp_ctxp);
8462 struct gimplify_omp_ctx *outer
8463 = gimplify_omp_ctxp->outer_context;
8464 if (outer && lastprivate)
8466 if (outer->region_type == ORT_WORKSHARE
8467 && outer->combined_loop)
8469 n = splay_tree_lookup (outer->variables,
8470 (splay_tree_key)decl);
8471 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8473 lastprivate = false;
8474 outer = NULL;
8476 else if (outer->outer_context
8477 && (outer->outer_context->region_type
8478 == ORT_COMBINED_PARALLEL))
8479 outer = outer->outer_context;
8480 else if (omp_check_private (outer, decl, false))
8481 outer = NULL;
8483 else if (((outer->region_type & ORT_TASK) != 0)
8484 && outer->combined_loop
8485 && !omp_check_private (gimplify_omp_ctxp,
8486 decl, false))
8488 else if (outer->region_type != ORT_COMBINED_PARALLEL)
8489 outer = NULL;
8490 if (outer)
8492 n = splay_tree_lookup (outer->variables,
8493 (splay_tree_key)decl);
8494 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8496 omp_add_variable (outer, decl,
8497 GOVD_LASTPRIVATE | GOVD_SEEN);
8498 if (outer->region_type == ORT_COMBINED_PARALLEL
8499 && outer->outer_context
8500 && (outer->outer_context->region_type
8501 == ORT_WORKSHARE)
8502 && outer->outer_context->combined_loop)
8504 outer = outer->outer_context;
8505 n = splay_tree_lookup (outer->variables,
8506 (splay_tree_key)decl);
8507 if (omp_check_private (outer, decl, false))
8508 outer = NULL;
8509 else if (n == NULL
8510 || ((n->value & GOVD_DATA_SHARE_CLASS)
8511 == 0))
8512 omp_add_variable (outer, decl,
8513 GOVD_LASTPRIVATE
8514 | GOVD_SEEN);
8515 else
8516 outer = NULL;
8518 if (outer && outer->outer_context
8519 && (outer->outer_context->region_type
8520 == ORT_COMBINED_TEAMS))
8522 outer = outer->outer_context;
8523 n = splay_tree_lookup (outer->variables,
8524 (splay_tree_key)decl);
8525 if (n == NULL
8526 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8527 omp_add_variable (outer, decl,
8528 GOVD_SHARED | GOVD_SEEN);
8529 else
8530 outer = NULL;
8532 if (outer && outer->outer_context)
8533 omp_notice_variable (outer->outer_context, decl,
8534 true);
8539 c = build_omp_clause (input_location,
8540 lastprivate ? OMP_CLAUSE_LASTPRIVATE
8541 : OMP_CLAUSE_PRIVATE);
8542 OMP_CLAUSE_DECL (c) = decl;
8543 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
8544 OMP_FOR_CLAUSES (for_stmt) = c;
8545 omp_add_variable (gimplify_omp_ctxp, decl,
8546 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
8547 | GOVD_EXPLICIT | GOVD_SEEN);
8548 c = NULL_TREE;
8551 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
8552 omp_notice_variable (gimplify_omp_ctxp, decl, true);
8553 else
8554 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
8556 /* If DECL is not a gimple register, create a temporary variable to act
8557 as an iteration counter. This is valid, since DECL cannot be
8558 modified in the body of the loop. Similarly for any iteration vars
8559 in simd with collapse > 1 where the iterator vars must be
8560 lastprivate. */
8561 if (orig_for_stmt != for_stmt)
8562 var = decl;
8563 else if (!is_gimple_reg (decl)
8564 || (ort == ORT_SIMD
8565 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
8567 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
8568 TREE_OPERAND (t, 0) = var;
8570 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
8572 if (ort == ORT_SIMD
8573 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8575 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
8576 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
8577 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
8578 OMP_CLAUSE_DECL (c2) = var;
8579 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
8580 OMP_FOR_CLAUSES (for_stmt) = c2;
8581 omp_add_variable (gimplify_omp_ctxp, var,
8582 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
8583 if (c == NULL_TREE)
8585 c = c2;
8586 c2 = NULL_TREE;
8589 else
8590 omp_add_variable (gimplify_omp_ctxp, var,
8591 GOVD_PRIVATE | GOVD_SEEN);
8593 else
8594 var = decl;
8596 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
8597 is_gimple_val, fb_rvalue);
8598 ret = MIN (ret, tret);
8599 if (ret == GS_ERROR)
8600 return ret;
8602 /* Handle OMP_FOR_COND. */
8603 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
8604 gcc_assert (COMPARISON_CLASS_P (t));
8605 gcc_assert (TREE_OPERAND (t, 0) == decl);
8607 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
8608 is_gimple_val, fb_rvalue);
8609 ret = MIN (ret, tret);
8611 /* Handle OMP_FOR_INCR. */
8612 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8613 switch (TREE_CODE (t))
8615 case PREINCREMENT_EXPR:
8616 case POSTINCREMENT_EXPR:
8618 tree decl = TREE_OPERAND (t, 0);
8619 /* c_omp_for_incr_canonicalize_ptr() should have been
8620 called to massage things appropriately. */
8621 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
8623 if (orig_for_stmt != for_stmt)
8624 break;
8625 t = build_int_cst (TREE_TYPE (decl), 1);
8626 if (c)
8627 OMP_CLAUSE_LINEAR_STEP (c) = t;
8628 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
8629 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
8630 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
8631 break;
8634 case PREDECREMENT_EXPR:
8635 case POSTDECREMENT_EXPR:
8636 /* c_omp_for_incr_canonicalize_ptr() should have been
8637 called to massage things appropriately. */
8638 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
8639 if (orig_for_stmt != for_stmt)
8640 break;
8641 t = build_int_cst (TREE_TYPE (decl), -1);
8642 if (c)
8643 OMP_CLAUSE_LINEAR_STEP (c) = t;
8644 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
8645 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
8646 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
8647 break;
8649 case MODIFY_EXPR:
8650 gcc_assert (TREE_OPERAND (t, 0) == decl);
8651 TREE_OPERAND (t, 0) = var;
8653 t = TREE_OPERAND (t, 1);
8654 switch (TREE_CODE (t))
8656 case PLUS_EXPR:
8657 if (TREE_OPERAND (t, 1) == decl)
8659 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
8660 TREE_OPERAND (t, 0) = var;
8661 break;
8664 /* Fallthru. */
8665 case MINUS_EXPR:
8666 case POINTER_PLUS_EXPR:
8667 gcc_assert (TREE_OPERAND (t, 0) == decl);
8668 TREE_OPERAND (t, 0) = var;
8669 break;
8670 default:
8671 gcc_unreachable ();
8674 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
8675 is_gimple_val, fb_rvalue);
8676 ret = MIN (ret, tret);
8677 if (c)
8679 tree step = TREE_OPERAND (t, 1);
8680 tree stept = TREE_TYPE (decl);
8681 if (POINTER_TYPE_P (stept))
8682 stept = sizetype;
8683 step = fold_convert (stept, step);
8684 if (TREE_CODE (t) == MINUS_EXPR)
8685 step = fold_build1 (NEGATE_EXPR, stept, step);
8686 OMP_CLAUSE_LINEAR_STEP (c) = step;
8687 if (step != TREE_OPERAND (t, 1))
8689 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
8690 &for_pre_body, NULL,
8691 is_gimple_val, fb_rvalue);
8692 ret = MIN (ret, tret);
8695 break;
8697 default:
8698 gcc_unreachable ();
8701 if (c2)
8703 gcc_assert (c);
8704 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
8707 if ((var != decl || collapse > 1) && orig_for_stmt == for_stmt)
8709 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
8710 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8711 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
8712 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8713 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
8714 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
8715 && OMP_CLAUSE_DECL (c) == decl)
8717 if (is_doacross && (collapse == 1 || i >= collapse))
8718 t = var;
8719 else
8721 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8722 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8723 gcc_assert (TREE_OPERAND (t, 0) == var);
8724 t = TREE_OPERAND (t, 1);
8725 gcc_assert (TREE_CODE (t) == PLUS_EXPR
8726 || TREE_CODE (t) == MINUS_EXPR
8727 || TREE_CODE (t) == POINTER_PLUS_EXPR);
8728 gcc_assert (TREE_OPERAND (t, 0) == var);
8729 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
8730 is_doacross ? var : decl,
8731 TREE_OPERAND (t, 1));
8733 gimple_seq *seq;
8734 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
8735 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
8736 else
8737 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
8738 gimplify_assign (decl, t, seq);
8743 BITMAP_FREE (has_decl_expr);
8745 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8747 push_gimplify_context ();
8748 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
8750 OMP_FOR_BODY (orig_for_stmt)
8751 = build3 (BIND_EXPR, void_type_node, NULL,
8752 OMP_FOR_BODY (orig_for_stmt), NULL);
8753 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
8757 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
8758 &for_body);
8760 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8762 if (gimple_code (g) == GIMPLE_BIND)
8763 pop_gimplify_context (g);
8764 else
8765 pop_gimplify_context (NULL);
8768 if (orig_for_stmt != for_stmt)
8769 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8771 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8772 decl = TREE_OPERAND (t, 0);
8773 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8774 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8775 gimplify_omp_ctxp = ctx->outer_context;
8776 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
8777 gimplify_omp_ctxp = ctx;
8778 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
8779 TREE_OPERAND (t, 0) = var;
8780 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8781 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
8782 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
8785 gimplify_adjust_omp_clauses (pre_p, &OMP_FOR_CLAUSES (orig_for_stmt),
8786 TREE_CODE (orig_for_stmt));
8788 int kind;
8789 switch (TREE_CODE (orig_for_stmt))
8791 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
8792 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
8793 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
8794 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
8795 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
8796 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
8797 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
8798 default:
8799 gcc_unreachable ();
8801 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
8802 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
8803 for_pre_body);
8804 if (orig_for_stmt != for_stmt)
8805 gimple_omp_for_set_combined_p (gfor, true);
8806 if (gimplify_omp_ctxp
8807 && (gimplify_omp_ctxp->combined_loop
8808 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
8809 && gimplify_omp_ctxp->outer_context
8810 && gimplify_omp_ctxp->outer_context->combined_loop)))
8812 gimple_omp_for_set_combined_into_p (gfor, true);
8813 if (gimplify_omp_ctxp->combined_loop)
8814 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
8815 else
8816 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
8819 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8821 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8822 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
8823 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
8824 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
8825 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
8826 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
8827 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8828 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
8831 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
8832 constructs with GIMPLE_OMP_TASK sandwiched in between them.
8833 The outer taskloop stands for computing the number of iterations,
8834 counts for collapsed loops and holding taskloop specific clauses.
8835 The task construct stands for the effect of data sharing on the
8836 explicit task it creates and the inner taskloop stands for expansion
8837 of the static loop inside of the explicit task construct. */
8838 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8840 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
8841 tree task_clauses = NULL_TREE;
8842 tree c = *gfor_clauses_ptr;
8843 tree *gtask_clauses_ptr = &task_clauses;
8844 tree outer_for_clauses = NULL_TREE;
8845 tree *gforo_clauses_ptr = &outer_for_clauses;
8846 for (; c; c = OMP_CLAUSE_CHAIN (c))
8847 switch (OMP_CLAUSE_CODE (c))
8849 /* These clauses are allowed on task, move them there. */
8850 case OMP_CLAUSE_SHARED:
8851 case OMP_CLAUSE_FIRSTPRIVATE:
8852 case OMP_CLAUSE_DEFAULT:
8853 case OMP_CLAUSE_IF:
8854 case OMP_CLAUSE_UNTIED:
8855 case OMP_CLAUSE_FINAL:
8856 case OMP_CLAUSE_MERGEABLE:
8857 case OMP_CLAUSE_PRIORITY:
8858 *gtask_clauses_ptr = c;
8859 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
8860 break;
8861 case OMP_CLAUSE_PRIVATE:
8862 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
8864 /* We want private on outer for and firstprivate
8865 on task. */
8866 *gtask_clauses_ptr
8867 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8868 OMP_CLAUSE_FIRSTPRIVATE);
8869 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
8870 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
8871 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
8872 *gforo_clauses_ptr = c;
8873 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
8875 else
8877 *gtask_clauses_ptr = c;
8878 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
8880 break;
8881 /* These clauses go into outer taskloop clauses. */
8882 case OMP_CLAUSE_GRAINSIZE:
8883 case OMP_CLAUSE_NUM_TASKS:
8884 case OMP_CLAUSE_NOGROUP:
8885 *gforo_clauses_ptr = c;
8886 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
8887 break;
8888 /* Taskloop clause we duplicate on both taskloops. */
8889 case OMP_CLAUSE_COLLAPSE:
8890 *gfor_clauses_ptr = c;
8891 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
8892 *gforo_clauses_ptr = copy_node (c);
8893 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
8894 break;
8895 /* For lastprivate, keep the clause on inner taskloop, and add
8896 a shared clause on task. If the same decl is also firstprivate,
8897 add also firstprivate clause on the inner taskloop. */
8898 case OMP_CLAUSE_LASTPRIVATE:
8899 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
8901 /* For taskloop C++ lastprivate IVs, we want:
8902 1) private on outer taskloop
8903 2) firstprivate and shared on task
8904 3) lastprivate on inner taskloop */
8905 *gtask_clauses_ptr
8906 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8907 OMP_CLAUSE_FIRSTPRIVATE);
8908 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
8909 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
8910 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
8911 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
8912 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8913 OMP_CLAUSE_PRIVATE);
8914 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
8915 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
8916 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
8917 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
8919 *gfor_clauses_ptr = c;
8920 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
8921 *gtask_clauses_ptr
8922 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
8923 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
8924 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8925 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
8926 gtask_clauses_ptr
8927 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
8928 break;
8929 default:
8930 gcc_unreachable ();
8932 *gfor_clauses_ptr = NULL_TREE;
8933 *gtask_clauses_ptr = NULL_TREE;
8934 *gforo_clauses_ptr = NULL_TREE;
8935 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
8936 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
8937 NULL_TREE, NULL_TREE, NULL_TREE);
8938 gimple_omp_task_set_taskloop_p (g, true);
8939 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
8940 gomp_for *gforo
8941 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
8942 gimple_omp_for_collapse (gfor),
8943 gimple_omp_for_pre_body (gfor));
8944 gimple_omp_for_set_pre_body (gfor, NULL);
8945 gimple_omp_for_set_combined_p (gforo, true);
8946 gimple_omp_for_set_combined_into_p (gfor, true);
8947 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
8949 t = unshare_expr (gimple_omp_for_index (gfor, i));
8950 gimple_omp_for_set_index (gforo, i, t);
8951 t = unshare_expr (gimple_omp_for_initial (gfor, i));
8952 gimple_omp_for_set_initial (gforo, i, t);
8953 gimple_omp_for_set_cond (gforo, i,
8954 gimple_omp_for_cond (gfor, i));
8955 t = unshare_expr (gimple_omp_for_final (gfor, i));
8956 gimple_omp_for_set_final (gforo, i, t);
8957 t = unshare_expr (gimple_omp_for_incr (gfor, i));
8958 gimple_omp_for_set_incr (gforo, i, t);
8960 gimplify_seq_add_stmt (pre_p, gforo);
8962 else
8963 gimplify_seq_add_stmt (pre_p, gfor);
8964 if (ret != GS_ALL_DONE)
8965 return GS_ERROR;
8966 *expr_p = NULL_TREE;
8967 return GS_ALL_DONE;
8970 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
8971 of OMP_TARGET's body. */
8973 static tree
8974 find_omp_teams (tree *tp, int *walk_subtrees, void *)
8976 *walk_subtrees = 0;
8977 switch (TREE_CODE (*tp))
8979 case OMP_TEAMS:
8980 return *tp;
8981 case BIND_EXPR:
8982 case STATEMENT_LIST:
8983 *walk_subtrees = 1;
8984 break;
8985 default:
8986 break;
8988 return NULL_TREE;
8991 /* Helper function of optimize_target_teams, determine if the expression
8992 can be computed safely before the target construct on the host. */
8994 static tree
8995 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
8997 splay_tree_node n;
8999 if (TYPE_P (*tp))
9001 *walk_subtrees = 0;
9002 return NULL_TREE;
9004 switch (TREE_CODE (*tp))
9006 case VAR_DECL:
9007 case PARM_DECL:
9008 case RESULT_DECL:
9009 *walk_subtrees = 0;
9010 if (error_operand_p (*tp)
9011 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
9012 || DECL_HAS_VALUE_EXPR_P (*tp)
9013 || DECL_THREAD_LOCAL_P (*tp)
9014 || TREE_SIDE_EFFECTS (*tp)
9015 || TREE_THIS_VOLATILE (*tp))
9016 return *tp;
9017 if (is_global_var (*tp)
9018 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
9019 || lookup_attribute ("omp declare target link",
9020 DECL_ATTRIBUTES (*tp))))
9021 return *tp;
9022 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9023 (splay_tree_key) *tp);
9024 if (n == NULL)
9026 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
9027 return NULL_TREE;
9028 return *tp;
9030 else if (n->value & GOVD_LOCAL)
9031 return *tp;
9032 else if (n->value & GOVD_FIRSTPRIVATE)
9033 return NULL_TREE;
9034 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
9035 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
9036 return NULL_TREE;
9037 return *tp;
9038 case INTEGER_CST:
9039 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
9040 return *tp;
9041 return NULL_TREE;
9042 case TARGET_EXPR:
9043 if (TARGET_EXPR_INITIAL (*tp)
9044 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
9045 return *tp;
9046 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
9047 walk_subtrees, NULL);
9048 /* Allow some reasonable subset of integral arithmetics. */
9049 case PLUS_EXPR:
9050 case MINUS_EXPR:
9051 case MULT_EXPR:
9052 case TRUNC_DIV_EXPR:
9053 case CEIL_DIV_EXPR:
9054 case FLOOR_DIV_EXPR:
9055 case ROUND_DIV_EXPR:
9056 case TRUNC_MOD_EXPR:
9057 case CEIL_MOD_EXPR:
9058 case FLOOR_MOD_EXPR:
9059 case ROUND_MOD_EXPR:
9060 case RDIV_EXPR:
9061 case EXACT_DIV_EXPR:
9062 case MIN_EXPR:
9063 case MAX_EXPR:
9064 case LSHIFT_EXPR:
9065 case RSHIFT_EXPR:
9066 case BIT_IOR_EXPR:
9067 case BIT_XOR_EXPR:
9068 case BIT_AND_EXPR:
9069 case NEGATE_EXPR:
9070 case ABS_EXPR:
9071 case BIT_NOT_EXPR:
9072 case NON_LVALUE_EXPR:
9073 CASE_CONVERT:
9074 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
9075 return *tp;
9076 return NULL_TREE;
9077 /* And disallow anything else, except for comparisons. */
9078 default:
9079 if (COMPARISON_CLASS_P (*tp))
9080 return NULL_TREE;
9081 return *tp;
9085 /* Try to determine if the num_teams and/or thread_limit expressions
9086 can have their values determined already before entering the
9087 target construct.
9088 INTEGER_CSTs trivially are,
9089 integral decls that are firstprivate (explicitly or implicitly)
9090 or explicitly map(always, to:) or map(always, tofrom:) on the target
9091 region too, and expressions involving simple arithmetics on those
9092 too, function calls are not ok, dereferencing something neither etc.
9093 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
9094 EXPR based on what we find:
9095 0 stands for clause not specified at all, use implementation default
9096 -1 stands for value that can't be determined easily before entering
9097 the target construct.
9098 If teams construct is not present at all, use 1 for num_teams
9099 and 0 for thread_limit (only one team is involved, and the thread
9100 limit is implementation defined. */
9102 static void
9103 optimize_target_teams (tree target, gimple_seq *pre_p)
9105 tree body = OMP_BODY (target);
9106 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
9107 tree num_teams = integer_zero_node;
9108 tree thread_limit = integer_zero_node;
9109 location_t num_teams_loc = EXPR_LOCATION (target);
9110 location_t thread_limit_loc = EXPR_LOCATION (target);
9111 tree c, *p, expr;
9112 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
9114 if (teams == NULL_TREE)
9115 num_teams = integer_one_node;
9116 else
9117 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
9119 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
9121 p = &num_teams;
9122 num_teams_loc = OMP_CLAUSE_LOCATION (c);
9124 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
9126 p = &thread_limit;
9127 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
9129 else
9130 continue;
9131 expr = OMP_CLAUSE_OPERAND (c, 0);
9132 if (TREE_CODE (expr) == INTEGER_CST)
9134 *p = expr;
9135 continue;
9137 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
9139 *p = integer_minus_one_node;
9140 continue;
9142 *p = expr;
9143 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
9144 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue)
9145 == GS_ERROR)
9147 gimplify_omp_ctxp = target_ctx;
9148 *p = integer_minus_one_node;
9149 continue;
9151 gimplify_omp_ctxp = target_ctx;
9152 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
9153 OMP_CLAUSE_OPERAND (c, 0) = *p;
9155 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
9156 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
9157 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
9158 OMP_TARGET_CLAUSES (target) = c;
9159 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
9160 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
9161 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
9162 OMP_TARGET_CLAUSES (target) = c;
9165 /* Gimplify the gross structure of several OMP constructs. */
9167 static void
9168 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
9170 tree expr = *expr_p;
9171 gimple *stmt;
9172 gimple_seq body = NULL;
9173 enum omp_region_type ort;
9175 switch (TREE_CODE (expr))
9177 case OMP_SECTIONS:
9178 case OMP_SINGLE:
9179 ort = ORT_WORKSHARE;
9180 break;
9181 case OMP_TARGET:
9182 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
9183 break;
9184 case OACC_KERNELS:
9185 ort = ORT_ACC_KERNELS;
9186 break;
9187 case OACC_PARALLEL:
9188 ort = ORT_ACC_PARALLEL;
9189 break;
9190 case OACC_DATA:
9191 ort = ORT_ACC_DATA;
9192 break;
9193 case OMP_TARGET_DATA:
9194 ort = ORT_TARGET_DATA;
9195 break;
9196 case OMP_TEAMS:
9197 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
9198 break;
9199 default:
9200 gcc_unreachable ();
9202 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
9203 TREE_CODE (expr));
9204 if (TREE_CODE (expr) == OMP_TARGET)
9205 optimize_target_teams (expr, pre_p);
9206 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
9208 push_gimplify_context ();
9209 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
9210 if (gimple_code (g) == GIMPLE_BIND)
9211 pop_gimplify_context (g);
9212 else
9213 pop_gimplify_context (NULL);
9214 if ((ort & ORT_TARGET_DATA) != 0)
9216 enum built_in_function end_ix;
9217 switch (TREE_CODE (expr))
9219 case OACC_DATA:
9220 end_ix = BUILT_IN_GOACC_DATA_END;
9221 break;
9222 case OMP_TARGET_DATA:
9223 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
9224 break;
9225 default:
9226 gcc_unreachable ();
9228 tree fn = builtin_decl_explicit (end_ix);
9229 g = gimple_build_call (fn, 0);
9230 gimple_seq cleanup = NULL;
9231 gimple_seq_add_stmt (&cleanup, g);
9232 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
9233 body = NULL;
9234 gimple_seq_add_stmt (&body, g);
9237 else
9238 gimplify_and_add (OMP_BODY (expr), &body);
9239 gimplify_adjust_omp_clauses (pre_p, &OMP_CLAUSES (expr), TREE_CODE (expr));
9241 switch (TREE_CODE (expr))
9243 case OACC_DATA:
9244 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
9245 OMP_CLAUSES (expr));
9246 break;
9247 case OACC_KERNELS:
9248 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
9249 OMP_CLAUSES (expr));
9250 break;
9251 case OACC_PARALLEL:
9252 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
9253 OMP_CLAUSES (expr));
9254 break;
9255 case OMP_SECTIONS:
9256 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
9257 break;
9258 case OMP_SINGLE:
9259 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
9260 break;
9261 case OMP_TARGET:
9262 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
9263 OMP_CLAUSES (expr));
9264 break;
9265 case OMP_TARGET_DATA:
9266 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
9267 OMP_CLAUSES (expr));
9268 break;
9269 case OMP_TEAMS:
9270 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
9271 break;
9272 default:
9273 gcc_unreachable ();
9276 gimplify_seq_add_stmt (pre_p, stmt);
9277 *expr_p = NULL_TREE;
9280 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
9281 target update constructs. */
9283 static void
9284 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
9286 tree expr = *expr_p;
9287 int kind;
9288 gomp_target *stmt;
9289 enum omp_region_type ort = ORT_WORKSHARE;
9291 switch (TREE_CODE (expr))
9293 case OACC_ENTER_DATA:
9294 case OACC_EXIT_DATA:
9295 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
9296 ort = ORT_ACC;
9297 break;
9298 case OACC_UPDATE:
9299 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
9300 ort = ORT_ACC;
9301 break;
9302 case OMP_TARGET_UPDATE:
9303 kind = GF_OMP_TARGET_KIND_UPDATE;
9304 break;
9305 case OMP_TARGET_ENTER_DATA:
9306 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
9307 break;
9308 case OMP_TARGET_EXIT_DATA:
9309 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
9310 break;
9311 default:
9312 gcc_unreachable ();
9314 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
9315 ort, TREE_CODE (expr));
9316 gimplify_adjust_omp_clauses (pre_p, &OMP_STANDALONE_CLAUSES (expr),
9317 TREE_CODE (expr));
9318 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
9320 gimplify_seq_add_stmt (pre_p, stmt);
9321 *expr_p = NULL_TREE;
9324 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
9325 stabilized the lhs of the atomic operation as *ADDR. Return true if
9326 EXPR is this stabilized form. */
9328 static bool
9329 goa_lhs_expr_p (tree expr, tree addr)
9331 /* Also include casts to other type variants. The C front end is fond
9332 of adding these for e.g. volatile variables. This is like
9333 STRIP_TYPE_NOPS but includes the main variant lookup. */
9334 STRIP_USELESS_TYPE_CONVERSION (expr);
9336 if (TREE_CODE (expr) == INDIRECT_REF)
9338 expr = TREE_OPERAND (expr, 0);
9339 while (expr != addr
9340 && (CONVERT_EXPR_P (expr)
9341 || TREE_CODE (expr) == NON_LVALUE_EXPR)
9342 && TREE_CODE (expr) == TREE_CODE (addr)
9343 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
9345 expr = TREE_OPERAND (expr, 0);
9346 addr = TREE_OPERAND (addr, 0);
9348 if (expr == addr)
9349 return true;
9350 return (TREE_CODE (addr) == ADDR_EXPR
9351 && TREE_CODE (expr) == ADDR_EXPR
9352 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
9354 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
9355 return true;
9356 return false;
9359 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
9360 expression does not involve the lhs, evaluate it into a temporary.
9361 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
9362 or -1 if an error was encountered. */
9364 static int
9365 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
9366 tree lhs_var)
9368 tree expr = *expr_p;
9369 int saw_lhs;
9371 if (goa_lhs_expr_p (expr, lhs_addr))
9373 *expr_p = lhs_var;
9374 return 1;
9376 if (is_gimple_val (expr))
9377 return 0;
9379 saw_lhs = 0;
9380 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
9382 case tcc_binary:
9383 case tcc_comparison:
9384 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
9385 lhs_var);
9386 case tcc_unary:
9387 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
9388 lhs_var);
9389 break;
9390 case tcc_expression:
9391 switch (TREE_CODE (expr))
9393 case TRUTH_ANDIF_EXPR:
9394 case TRUTH_ORIF_EXPR:
9395 case TRUTH_AND_EXPR:
9396 case TRUTH_OR_EXPR:
9397 case TRUTH_XOR_EXPR:
9398 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
9399 lhs_addr, lhs_var);
9400 case TRUTH_NOT_EXPR:
9401 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
9402 lhs_addr, lhs_var);
9403 break;
9404 case COMPOUND_EXPR:
9405 /* Break out any preevaluations from cp_build_modify_expr. */
9406 for (; TREE_CODE (expr) == COMPOUND_EXPR;
9407 expr = TREE_OPERAND (expr, 1))
9408 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
9409 *expr_p = expr;
9410 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
9411 default:
9412 break;
9414 break;
9415 default:
9416 break;
9419 if (saw_lhs == 0)
9421 enum gimplify_status gs;
9422 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
9423 if (gs != GS_ALL_DONE)
9424 saw_lhs = -1;
9427 return saw_lhs;
9430 /* Gimplify an OMP_ATOMIC statement. */
9432 static enum gimplify_status
9433 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
9435 tree addr = TREE_OPERAND (*expr_p, 0);
9436 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
9437 ? NULL : TREE_OPERAND (*expr_p, 1);
9438 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
9439 tree tmp_load;
9440 gomp_atomic_load *loadstmt;
9441 gomp_atomic_store *storestmt;
9443 tmp_load = create_tmp_reg (type);
9444 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
9445 return GS_ERROR;
9447 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
9448 != GS_ALL_DONE)
9449 return GS_ERROR;
9451 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
9452 gimplify_seq_add_stmt (pre_p, loadstmt);
9453 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
9454 != GS_ALL_DONE)
9455 return GS_ERROR;
9457 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
9458 rhs = tmp_load;
9459 storestmt = gimple_build_omp_atomic_store (rhs);
9460 gimplify_seq_add_stmt (pre_p, storestmt);
9461 if (OMP_ATOMIC_SEQ_CST (*expr_p))
9463 gimple_omp_atomic_set_seq_cst (loadstmt);
9464 gimple_omp_atomic_set_seq_cst (storestmt);
9466 switch (TREE_CODE (*expr_p))
9468 case OMP_ATOMIC_READ:
9469 case OMP_ATOMIC_CAPTURE_OLD:
9470 *expr_p = tmp_load;
9471 gimple_omp_atomic_set_need_value (loadstmt);
9472 break;
9473 case OMP_ATOMIC_CAPTURE_NEW:
9474 *expr_p = rhs;
9475 gimple_omp_atomic_set_need_value (storestmt);
9476 break;
9477 default:
9478 *expr_p = NULL;
9479 break;
9482 return GS_ALL_DONE;
9485 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
9486 body, and adding some EH bits. */
9488 static enum gimplify_status
9489 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
9491 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
9492 gimple *body_stmt;
9493 gtransaction *trans_stmt;
9494 gimple_seq body = NULL;
9495 int subcode = 0;
9497 /* Wrap the transaction body in a BIND_EXPR so we have a context
9498 where to put decls for OMP. */
9499 if (TREE_CODE (tbody) != BIND_EXPR)
9501 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
9502 TREE_SIDE_EFFECTS (bind) = 1;
9503 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
9504 TRANSACTION_EXPR_BODY (expr) = bind;
9507 push_gimplify_context ();
9508 temp = voidify_wrapper_expr (*expr_p, NULL);
9510 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
9511 pop_gimplify_context (body_stmt);
9513 trans_stmt = gimple_build_transaction (body, NULL);
9514 if (TRANSACTION_EXPR_OUTER (expr))
9515 subcode = GTMA_IS_OUTER;
9516 else if (TRANSACTION_EXPR_RELAXED (expr))
9517 subcode = GTMA_IS_RELAXED;
9518 gimple_transaction_set_subcode (trans_stmt, subcode);
9520 gimplify_seq_add_stmt (pre_p, trans_stmt);
9522 if (temp)
9524 *expr_p = temp;
9525 return GS_OK;
9528 *expr_p = NULL_TREE;
9529 return GS_ALL_DONE;
9532 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
9533 is the OMP_BODY of the original EXPR (which has already been
9534 gimplified so it's not present in the EXPR).
9536 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
9538 static gimple *
9539 gimplify_omp_ordered (tree expr, gimple_seq body)
9541 tree c, decls;
9542 int failures = 0;
9543 unsigned int i;
9544 tree source_c = NULL_TREE;
9545 tree sink_c = NULL_TREE;
9547 if (gimplify_omp_ctxp)
9548 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
9549 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9550 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
9551 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
9552 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
9554 error_at (OMP_CLAUSE_LOCATION (c),
9555 "%<ordered%> construct with %<depend%> clause must be "
9556 "closely nested inside a loop with %<ordered%> clause "
9557 "with a parameter");
9558 failures++;
9560 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9561 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9563 bool fail = false;
9564 for (decls = OMP_CLAUSE_DECL (c), i = 0;
9565 decls && TREE_CODE (decls) == TREE_LIST;
9566 decls = TREE_CHAIN (decls), ++i)
9567 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
9568 continue;
9569 else if (TREE_VALUE (decls)
9570 != gimplify_omp_ctxp->loop_iter_var[2 * i])
9572 error_at (OMP_CLAUSE_LOCATION (c),
9573 "variable %qE is not an iteration "
9574 "of outermost loop %d, expected %qE",
9575 TREE_VALUE (decls), i + 1,
9576 gimplify_omp_ctxp->loop_iter_var[2 * i]);
9577 fail = true;
9578 failures++;
9580 else
9581 TREE_VALUE (decls)
9582 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
9583 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
9585 error_at (OMP_CLAUSE_LOCATION (c),
9586 "number of variables in %<depend(sink)%> "
9587 "clause does not match number of "
9588 "iteration variables");
9589 failures++;
9591 sink_c = c;
9593 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9594 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
9596 if (source_c)
9598 error_at (OMP_CLAUSE_LOCATION (c),
9599 "more than one %<depend(source)%> clause on an "
9600 "%<ordered%> construct");
9601 failures++;
9603 else
9604 source_c = c;
9606 if (source_c && sink_c)
9608 error_at (OMP_CLAUSE_LOCATION (source_c),
9609 "%<depend(source)%> clause specified together with "
9610 "%<depend(sink:)%> clauses on the same construct");
9611 failures++;
9614 if (failures)
9615 return gimple_build_nop ();
9616 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
9619 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
9620 expression produces a value to be used as an operand inside a GIMPLE
9621 statement, the value will be stored back in *EXPR_P. This value will
9622 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
9623 an SSA_NAME. The corresponding sequence of GIMPLE statements is
9624 emitted in PRE_P and POST_P.
9626 Additionally, this process may overwrite parts of the input
9627 expression during gimplification. Ideally, it should be
9628 possible to do non-destructive gimplification.
9630 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
9631 the expression needs to evaluate to a value to be used as
9632 an operand in a GIMPLE statement, this value will be stored in
9633 *EXPR_P on exit. This happens when the caller specifies one
9634 of fb_lvalue or fb_rvalue fallback flags.
9636 PRE_P will contain the sequence of GIMPLE statements corresponding
9637 to the evaluation of EXPR and all the side-effects that must
9638 be executed before the main expression. On exit, the last
9639 statement of PRE_P is the core statement being gimplified. For
9640 instance, when gimplifying 'if (++a)' the last statement in
9641 PRE_P will be 'if (t.1)' where t.1 is the result of
9642 pre-incrementing 'a'.
9644 POST_P will contain the sequence of GIMPLE statements corresponding
9645 to the evaluation of all the side-effects that must be executed
9646 after the main expression. If this is NULL, the post
9647 side-effects are stored at the end of PRE_P.
9649 The reason why the output is split in two is to handle post
9650 side-effects explicitly. In some cases, an expression may have
9651 inner and outer post side-effects which need to be emitted in
9652 an order different from the one given by the recursive
9653 traversal. For instance, for the expression (*p--)++ the post
9654 side-effects of '--' must actually occur *after* the post
9655 side-effects of '++'. However, gimplification will first visit
9656 the inner expression, so if a separate POST sequence was not
9657 used, the resulting sequence would be:
9659 1 t.1 = *p
9660 2 p = p - 1
9661 3 t.2 = t.1 + 1
9662 4 *p = t.2
9664 However, the post-decrement operation in line #2 must not be
9665 evaluated until after the store to *p at line #4, so the
9666 correct sequence should be:
9668 1 t.1 = *p
9669 2 t.2 = t.1 + 1
9670 3 *p = t.2
9671 4 p = p - 1
9673 So, by specifying a separate post queue, it is possible
9674 to emit the post side-effects in the correct order.
9675 If POST_P is NULL, an internal queue will be used. Before
9676 returning to the caller, the sequence POST_P is appended to
9677 the main output sequence PRE_P.
9679 GIMPLE_TEST_F points to a function that takes a tree T and
9680 returns nonzero if T is in the GIMPLE form requested by the
9681 caller. The GIMPLE predicates are in gimple.c.
9683 FALLBACK tells the function what sort of a temporary we want if
9684 gimplification cannot produce an expression that complies with
9685 GIMPLE_TEST_F.
9687 fb_none means that no temporary should be generated
9688 fb_rvalue means that an rvalue is OK to generate
9689 fb_lvalue means that an lvalue is OK to generate
9690 fb_either means that either is OK, but an lvalue is preferable.
9691 fb_mayfail means that gimplification may fail (in which case
9692 GS_ERROR will be returned)
9694 The return value is either GS_ERROR or GS_ALL_DONE, since this
9695 function iterates until EXPR is completely gimplified or an error
9696 occurs. */
9698 enum gimplify_status
9699 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
9700 bool (*gimple_test_f) (tree), fallback_t fallback)
9702 tree tmp;
9703 gimple_seq internal_pre = NULL;
9704 gimple_seq internal_post = NULL;
9705 tree save_expr;
9706 bool is_statement;
9707 location_t saved_location;
9708 enum gimplify_status ret;
9709 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
9711 save_expr = *expr_p;
9712 if (save_expr == NULL_TREE)
9713 return GS_ALL_DONE;
9715 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
9716 is_statement = gimple_test_f == is_gimple_stmt;
9717 if (is_statement)
9718 gcc_assert (pre_p);
9720 /* Consistency checks. */
9721 if (gimple_test_f == is_gimple_reg)
9722 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
9723 else if (gimple_test_f == is_gimple_val
9724 || gimple_test_f == is_gimple_call_addr
9725 || gimple_test_f == is_gimple_condexpr
9726 || gimple_test_f == is_gimple_mem_rhs
9727 || gimple_test_f == is_gimple_mem_rhs_or_call
9728 || gimple_test_f == is_gimple_reg_rhs
9729 || gimple_test_f == is_gimple_reg_rhs_or_call
9730 || gimple_test_f == is_gimple_asm_val
9731 || gimple_test_f == is_gimple_mem_ref_addr)
9732 gcc_assert (fallback & fb_rvalue);
9733 else if (gimple_test_f == is_gimple_min_lval
9734 || gimple_test_f == is_gimple_lvalue)
9735 gcc_assert (fallback & fb_lvalue);
9736 else if (gimple_test_f == is_gimple_addressable)
9737 gcc_assert (fallback & fb_either);
9738 else if (gimple_test_f == is_gimple_stmt)
9739 gcc_assert (fallback == fb_none);
9740 else
9742 /* We should have recognized the GIMPLE_TEST_F predicate to
9743 know what kind of fallback to use in case a temporary is
9744 needed to hold the value or address of *EXPR_P. */
9745 gcc_unreachable ();
9748 /* We used to check the predicate here and return immediately if it
9749 succeeds. This is wrong; the design is for gimplification to be
9750 idempotent, and for the predicates to only test for valid forms, not
9751 whether they are fully simplified. */
9752 if (pre_p == NULL)
9753 pre_p = &internal_pre;
9755 if (post_p == NULL)
9756 post_p = &internal_post;
9758 /* Remember the last statements added to PRE_P and POST_P. Every
9759 new statement added by the gimplification helpers needs to be
9760 annotated with location information. To centralize the
9761 responsibility, we remember the last statement that had been
9762 added to both queues before gimplifying *EXPR_P. If
9763 gimplification produces new statements in PRE_P and POST_P, those
9764 statements will be annotated with the same location information
9765 as *EXPR_P. */
9766 pre_last_gsi = gsi_last (*pre_p);
9767 post_last_gsi = gsi_last (*post_p);
9769 saved_location = input_location;
9770 if (save_expr != error_mark_node
9771 && EXPR_HAS_LOCATION (*expr_p))
9772 input_location = EXPR_LOCATION (*expr_p);
9774 /* Loop over the specific gimplifiers until the toplevel node
9775 remains the same. */
9778 /* Strip away as many useless type conversions as possible
9779 at the toplevel. */
9780 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
9782 /* Remember the expr. */
9783 save_expr = *expr_p;
9785 /* Die, die, die, my darling. */
9786 if (save_expr == error_mark_node
9787 || (TREE_TYPE (save_expr)
9788 && TREE_TYPE (save_expr) == error_mark_node))
9790 ret = GS_ERROR;
9791 break;
9794 /* Do any language-specific gimplification. */
9795 ret = ((enum gimplify_status)
9796 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
9797 if (ret == GS_OK)
9799 if (*expr_p == NULL_TREE)
9800 break;
9801 if (*expr_p != save_expr)
9802 continue;
9804 else if (ret != GS_UNHANDLED)
9805 break;
9807 /* Make sure that all the cases set 'ret' appropriately. */
9808 ret = GS_UNHANDLED;
9809 switch (TREE_CODE (*expr_p))
9811 /* First deal with the special cases. */
9813 case POSTINCREMENT_EXPR:
9814 case POSTDECREMENT_EXPR:
9815 case PREINCREMENT_EXPR:
9816 case PREDECREMENT_EXPR:
9817 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
9818 fallback != fb_none,
9819 TREE_TYPE (*expr_p));
9820 break;
9822 case VIEW_CONVERT_EXPR:
9823 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
9824 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
9826 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
9827 post_p, is_gimple_val, fb_rvalue);
9828 recalculate_side_effects (*expr_p);
9829 break;
9831 /* Fallthru. */
9833 case ARRAY_REF:
9834 case ARRAY_RANGE_REF:
9835 case REALPART_EXPR:
9836 case IMAGPART_EXPR:
9837 case COMPONENT_REF:
9838 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
9839 fallback ? fallback : fb_rvalue);
9840 break;
9842 case COND_EXPR:
9843 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
9845 /* C99 code may assign to an array in a structure value of a
9846 conditional expression, and this has undefined behavior
9847 only on execution, so create a temporary if an lvalue is
9848 required. */
9849 if (fallback == fb_lvalue)
9851 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
9852 mark_addressable (*expr_p);
9853 ret = GS_OK;
9855 break;
9857 case CALL_EXPR:
9858 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
9860 /* C99 code may assign to an array in a structure returned
9861 from a function, and this has undefined behavior only on
9862 execution, so create a temporary if an lvalue is
9863 required. */
9864 if (fallback == fb_lvalue)
9866 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
9867 mark_addressable (*expr_p);
9868 ret = GS_OK;
9870 break;
9872 case TREE_LIST:
9873 gcc_unreachable ();
9875 case COMPOUND_EXPR:
9876 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
9877 break;
9879 case COMPOUND_LITERAL_EXPR:
9880 ret = gimplify_compound_literal_expr (expr_p, pre_p,
9881 gimple_test_f, fallback);
9882 break;
9884 case MODIFY_EXPR:
9885 case INIT_EXPR:
9886 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
9887 fallback != fb_none);
9888 break;
9890 case TRUTH_ANDIF_EXPR:
9891 case TRUTH_ORIF_EXPR:
9893 /* Preserve the original type of the expression and the
9894 source location of the outer expression. */
9895 tree org_type = TREE_TYPE (*expr_p);
9896 *expr_p = gimple_boolify (*expr_p);
9897 *expr_p = build3_loc (input_location, COND_EXPR,
9898 org_type, *expr_p,
9899 fold_convert_loc
9900 (input_location,
9901 org_type, boolean_true_node),
9902 fold_convert_loc
9903 (input_location,
9904 org_type, boolean_false_node));
9905 ret = GS_OK;
9906 break;
9909 case TRUTH_NOT_EXPR:
9911 tree type = TREE_TYPE (*expr_p);
9912 /* The parsers are careful to generate TRUTH_NOT_EXPR
9913 only with operands that are always zero or one.
9914 We do not fold here but handle the only interesting case
9915 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
9916 *expr_p = gimple_boolify (*expr_p);
9917 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
9918 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
9919 TREE_TYPE (*expr_p),
9920 TREE_OPERAND (*expr_p, 0));
9921 else
9922 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
9923 TREE_TYPE (*expr_p),
9924 TREE_OPERAND (*expr_p, 0),
9925 build_int_cst (TREE_TYPE (*expr_p), 1));
9926 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
9927 *expr_p = fold_convert_loc (input_location, type, *expr_p);
9928 ret = GS_OK;
9929 break;
9932 case ADDR_EXPR:
9933 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
9934 break;
9936 case ANNOTATE_EXPR:
9938 tree cond = TREE_OPERAND (*expr_p, 0);
9939 tree kind = TREE_OPERAND (*expr_p, 1);
9940 tree type = TREE_TYPE (cond);
9941 if (!INTEGRAL_TYPE_P (type))
9943 *expr_p = cond;
9944 ret = GS_OK;
9945 break;
9947 tree tmp = create_tmp_var (type);
9948 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
9949 gcall *call
9950 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
9951 gimple_call_set_lhs (call, tmp);
9952 gimplify_seq_add_stmt (pre_p, call);
9953 *expr_p = tmp;
9954 ret = GS_ALL_DONE;
9955 break;
9958 case VA_ARG_EXPR:
9959 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
9960 break;
9962 CASE_CONVERT:
9963 if (IS_EMPTY_STMT (*expr_p))
9965 ret = GS_ALL_DONE;
9966 break;
9969 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
9970 || fallback == fb_none)
9972 /* Just strip a conversion to void (or in void context) and
9973 try again. */
9974 *expr_p = TREE_OPERAND (*expr_p, 0);
9975 ret = GS_OK;
9976 break;
9979 ret = gimplify_conversion (expr_p);
9980 if (ret == GS_ERROR)
9981 break;
9982 if (*expr_p != save_expr)
9983 break;
9984 /* FALLTHRU */
9986 case FIX_TRUNC_EXPR:
9987 /* unary_expr: ... | '(' cast ')' val | ... */
9988 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
9989 is_gimple_val, fb_rvalue);
9990 recalculate_side_effects (*expr_p);
9991 break;
9993 case INDIRECT_REF:
9995 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
9996 bool notrap = TREE_THIS_NOTRAP (*expr_p);
9997 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
9999 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
10000 if (*expr_p != save_expr)
10002 ret = GS_OK;
10003 break;
10006 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10007 is_gimple_reg, fb_rvalue);
10008 if (ret == GS_ERROR)
10009 break;
10011 recalculate_side_effects (*expr_p);
10012 *expr_p = fold_build2_loc (input_location, MEM_REF,
10013 TREE_TYPE (*expr_p),
10014 TREE_OPERAND (*expr_p, 0),
10015 build_int_cst (saved_ptr_type, 0));
10016 TREE_THIS_VOLATILE (*expr_p) = volatilep;
10017 TREE_THIS_NOTRAP (*expr_p) = notrap;
10018 ret = GS_OK;
10019 break;
10022 /* We arrive here through the various re-gimplifcation paths. */
10023 case MEM_REF:
10024 /* First try re-folding the whole thing. */
10025 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
10026 TREE_OPERAND (*expr_p, 0),
10027 TREE_OPERAND (*expr_p, 1));
10028 if (tmp)
10030 REF_REVERSE_STORAGE_ORDER (tmp)
10031 = REF_REVERSE_STORAGE_ORDER (*expr_p);
10032 *expr_p = tmp;
10033 recalculate_side_effects (*expr_p);
10034 ret = GS_OK;
10035 break;
10037 /* Avoid re-gimplifying the address operand if it is already
10038 in suitable form. Re-gimplifying would mark the address
10039 operand addressable. Always gimplify when not in SSA form
10040 as we still may have to gimplify decls with value-exprs. */
10041 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
10042 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
10044 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10045 is_gimple_mem_ref_addr, fb_rvalue);
10046 if (ret == GS_ERROR)
10047 break;
10049 recalculate_side_effects (*expr_p);
10050 ret = GS_ALL_DONE;
10051 break;
10053 /* Constants need not be gimplified. */
10054 case INTEGER_CST:
10055 case REAL_CST:
10056 case FIXED_CST:
10057 case STRING_CST:
10058 case COMPLEX_CST:
10059 case VECTOR_CST:
10060 /* Drop the overflow flag on constants, we do not want
10061 that in the GIMPLE IL. */
10062 if (TREE_OVERFLOW_P (*expr_p))
10063 *expr_p = drop_tree_overflow (*expr_p);
10064 ret = GS_ALL_DONE;
10065 break;
10067 case CONST_DECL:
10068 /* If we require an lvalue, such as for ADDR_EXPR, retain the
10069 CONST_DECL node. Otherwise the decl is replaceable by its
10070 value. */
10071 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
10072 if (fallback & fb_lvalue)
10073 ret = GS_ALL_DONE;
10074 else
10076 *expr_p = DECL_INITIAL (*expr_p);
10077 ret = GS_OK;
10079 break;
10081 case DECL_EXPR:
10082 ret = gimplify_decl_expr (expr_p, pre_p);
10083 break;
10085 case BIND_EXPR:
10086 ret = gimplify_bind_expr (expr_p, pre_p);
10087 break;
10089 case LOOP_EXPR:
10090 ret = gimplify_loop_expr (expr_p, pre_p);
10091 break;
10093 case SWITCH_EXPR:
10094 ret = gimplify_switch_expr (expr_p, pre_p);
10095 break;
10097 case EXIT_EXPR:
10098 ret = gimplify_exit_expr (expr_p);
10099 break;
10101 case GOTO_EXPR:
10102 /* If the target is not LABEL, then it is a computed jump
10103 and the target needs to be gimplified. */
10104 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
10106 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
10107 NULL, is_gimple_val, fb_rvalue);
10108 if (ret == GS_ERROR)
10109 break;
10111 gimplify_seq_add_stmt (pre_p,
10112 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
10113 ret = GS_ALL_DONE;
10114 break;
10116 case PREDICT_EXPR:
10117 gimplify_seq_add_stmt (pre_p,
10118 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
10119 PREDICT_EXPR_OUTCOME (*expr_p)));
10120 ret = GS_ALL_DONE;
10121 break;
10123 case LABEL_EXPR:
10124 ret = GS_ALL_DONE;
10125 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
10126 == current_function_decl);
10127 gimplify_seq_add_stmt (pre_p,
10128 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
10129 break;
10131 case CASE_LABEL_EXPR:
10132 ret = gimplify_case_label_expr (expr_p, pre_p);
10133 break;
10135 case RETURN_EXPR:
10136 ret = gimplify_return_expr (*expr_p, pre_p);
10137 break;
10139 case CONSTRUCTOR:
10140 /* Don't reduce this in place; let gimplify_init_constructor work its
10141 magic. Buf if we're just elaborating this for side effects, just
10142 gimplify any element that has side-effects. */
10143 if (fallback == fb_none)
10145 unsigned HOST_WIDE_INT ix;
10146 tree val;
10147 tree temp = NULL_TREE;
10148 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
10149 if (TREE_SIDE_EFFECTS (val))
10150 append_to_statement_list (val, &temp);
10152 *expr_p = temp;
10153 ret = temp ? GS_OK : GS_ALL_DONE;
10155 /* C99 code may assign to an array in a constructed
10156 structure or union, and this has undefined behavior only
10157 on execution, so create a temporary if an lvalue is
10158 required. */
10159 else if (fallback == fb_lvalue)
10161 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
10162 mark_addressable (*expr_p);
10163 ret = GS_OK;
10165 else
10166 ret = GS_ALL_DONE;
10167 break;
10169 /* The following are special cases that are not handled by the
10170 original GIMPLE grammar. */
10172 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
10173 eliminated. */
10174 case SAVE_EXPR:
10175 ret = gimplify_save_expr (expr_p, pre_p, post_p);
10176 break;
10178 case BIT_FIELD_REF:
10179 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10180 post_p, is_gimple_lvalue, fb_either);
10181 recalculate_side_effects (*expr_p);
10182 break;
10184 case TARGET_MEM_REF:
10186 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
10188 if (TMR_BASE (*expr_p))
10189 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
10190 post_p, is_gimple_mem_ref_addr, fb_either);
10191 if (TMR_INDEX (*expr_p))
10192 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
10193 post_p, is_gimple_val, fb_rvalue);
10194 if (TMR_INDEX2 (*expr_p))
10195 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
10196 post_p, is_gimple_val, fb_rvalue);
10197 /* TMR_STEP and TMR_OFFSET are always integer constants. */
10198 ret = MIN (r0, r1);
10200 break;
10202 case NON_LVALUE_EXPR:
10203 /* This should have been stripped above. */
10204 gcc_unreachable ();
10206 case ASM_EXPR:
10207 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
10208 break;
10210 case TRY_FINALLY_EXPR:
10211 case TRY_CATCH_EXPR:
10213 gimple_seq eval, cleanup;
10214 gtry *try_;
10216 /* Calls to destructors are generated automatically in FINALLY/CATCH
10217 block. They should have location as UNKNOWN_LOCATION. However,
10218 gimplify_call_expr will reset these call stmts to input_location
10219 if it finds stmt's location is unknown. To prevent resetting for
10220 destructors, we set the input_location to unknown.
10221 Note that this only affects the destructor calls in FINALLY/CATCH
10222 block, and will automatically reset to its original value by the
10223 end of gimplify_expr. */
10224 input_location = UNKNOWN_LOCATION;
10225 eval = cleanup = NULL;
10226 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
10227 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
10228 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
10229 if (gimple_seq_empty_p (cleanup))
10231 gimple_seq_add_seq (pre_p, eval);
10232 ret = GS_ALL_DONE;
10233 break;
10235 try_ = gimple_build_try (eval, cleanup,
10236 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
10237 ? GIMPLE_TRY_FINALLY
10238 : GIMPLE_TRY_CATCH);
10239 if (EXPR_HAS_LOCATION (save_expr))
10240 gimple_set_location (try_, EXPR_LOCATION (save_expr));
10241 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
10242 gimple_set_location (try_, saved_location);
10243 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
10244 gimple_try_set_catch_is_cleanup (try_,
10245 TRY_CATCH_IS_CLEANUP (*expr_p));
10246 gimplify_seq_add_stmt (pre_p, try_);
10247 ret = GS_ALL_DONE;
10248 break;
10251 case CLEANUP_POINT_EXPR:
10252 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
10253 break;
10255 case TARGET_EXPR:
10256 ret = gimplify_target_expr (expr_p, pre_p, post_p);
10257 break;
10259 case CATCH_EXPR:
10261 gimple *c;
10262 gimple_seq handler = NULL;
10263 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
10264 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
10265 gimplify_seq_add_stmt (pre_p, c);
10266 ret = GS_ALL_DONE;
10267 break;
10270 case EH_FILTER_EXPR:
10272 gimple *ehf;
10273 gimple_seq failure = NULL;
10275 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
10276 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
10277 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
10278 gimplify_seq_add_stmt (pre_p, ehf);
10279 ret = GS_ALL_DONE;
10280 break;
10283 case OBJ_TYPE_REF:
10285 enum gimplify_status r0, r1;
10286 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
10287 post_p, is_gimple_val, fb_rvalue);
10288 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
10289 post_p, is_gimple_val, fb_rvalue);
10290 TREE_SIDE_EFFECTS (*expr_p) = 0;
10291 ret = MIN (r0, r1);
10293 break;
10295 case LABEL_DECL:
10296 /* We get here when taking the address of a label. We mark
10297 the label as "forced"; meaning it can never be removed and
10298 it is a potential target for any computed goto. */
10299 FORCED_LABEL (*expr_p) = 1;
10300 ret = GS_ALL_DONE;
10301 break;
10303 case STATEMENT_LIST:
10304 ret = gimplify_statement_list (expr_p, pre_p);
10305 break;
10307 case WITH_SIZE_EXPR:
10309 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10310 post_p == &internal_post ? NULL : post_p,
10311 gimple_test_f, fallback);
10312 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
10313 is_gimple_val, fb_rvalue);
10314 ret = GS_ALL_DONE;
10316 break;
10318 case VAR_DECL:
10319 case PARM_DECL:
10320 ret = gimplify_var_or_parm_decl (expr_p);
10321 break;
10323 case RESULT_DECL:
10324 /* When within an OMP context, notice uses of variables. */
10325 if (gimplify_omp_ctxp)
10326 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
10327 ret = GS_ALL_DONE;
10328 break;
10330 case SSA_NAME:
10331 /* Allow callbacks into the gimplifier during optimization. */
10332 ret = GS_ALL_DONE;
10333 break;
10335 case OMP_PARALLEL:
10336 gimplify_omp_parallel (expr_p, pre_p);
10337 ret = GS_ALL_DONE;
10338 break;
10340 case OMP_TASK:
10341 gimplify_omp_task (expr_p, pre_p);
10342 ret = GS_ALL_DONE;
10343 break;
10345 case OMP_FOR:
10346 case OMP_SIMD:
10347 case CILK_SIMD:
10348 case CILK_FOR:
10349 case OMP_DISTRIBUTE:
10350 case OMP_TASKLOOP:
10351 case OACC_LOOP:
10352 ret = gimplify_omp_for (expr_p, pre_p);
10353 break;
10355 case OACC_CACHE:
10356 gimplify_oacc_cache (expr_p, pre_p);
10357 ret = GS_ALL_DONE;
10358 break;
10360 case OACC_HOST_DATA:
10361 sorry ("directive not yet implemented");
10362 ret = GS_ALL_DONE;
10363 break;
10365 case OACC_DECLARE:
10366 gimplify_oacc_declare (expr_p, pre_p);
10367 ret = GS_ALL_DONE;
10368 break;
10370 case OACC_DATA:
10371 case OACC_KERNELS:
10372 case OACC_PARALLEL:
10373 case OMP_SECTIONS:
10374 case OMP_SINGLE:
10375 case OMP_TARGET:
10376 case OMP_TARGET_DATA:
10377 case OMP_TEAMS:
10378 gimplify_omp_workshare (expr_p, pre_p);
10379 ret = GS_ALL_DONE;
10380 break;
10382 case OACC_ENTER_DATA:
10383 case OACC_EXIT_DATA:
10384 case OACC_UPDATE:
10385 case OMP_TARGET_UPDATE:
10386 case OMP_TARGET_ENTER_DATA:
10387 case OMP_TARGET_EXIT_DATA:
10388 gimplify_omp_target_update (expr_p, pre_p);
10389 ret = GS_ALL_DONE;
10390 break;
10392 case OMP_SECTION:
10393 case OMP_MASTER:
10394 case OMP_TASKGROUP:
10395 case OMP_ORDERED:
10396 case OMP_CRITICAL:
10398 gimple_seq body = NULL;
10399 gimple *g;
10401 gimplify_and_add (OMP_BODY (*expr_p), &body);
10402 switch (TREE_CODE (*expr_p))
10404 case OMP_SECTION:
10405 g = gimple_build_omp_section (body);
10406 break;
10407 case OMP_MASTER:
10408 g = gimple_build_omp_master (body);
10409 break;
10410 case OMP_TASKGROUP:
10412 gimple_seq cleanup = NULL;
10413 tree fn
10414 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
10415 g = gimple_build_call (fn, 0);
10416 gimple_seq_add_stmt (&cleanup, g);
10417 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10418 body = NULL;
10419 gimple_seq_add_stmt (&body, g);
10420 g = gimple_build_omp_taskgroup (body);
10422 break;
10423 case OMP_ORDERED:
10424 g = gimplify_omp_ordered (*expr_p, body);
10425 break;
10426 case OMP_CRITICAL:
10427 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
10428 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
10429 gimplify_adjust_omp_clauses (pre_p,
10430 &OMP_CRITICAL_CLAUSES (*expr_p),
10431 OMP_CRITICAL);
10432 g = gimple_build_omp_critical (body,
10433 OMP_CRITICAL_NAME (*expr_p),
10434 OMP_CRITICAL_CLAUSES (*expr_p));
10435 break;
10436 default:
10437 gcc_unreachable ();
10439 gimplify_seq_add_stmt (pre_p, g);
10440 ret = GS_ALL_DONE;
10441 break;
10444 case OMP_ATOMIC:
10445 case OMP_ATOMIC_READ:
10446 case OMP_ATOMIC_CAPTURE_OLD:
10447 case OMP_ATOMIC_CAPTURE_NEW:
10448 ret = gimplify_omp_atomic (expr_p, pre_p);
10449 break;
10451 case TRANSACTION_EXPR:
10452 ret = gimplify_transaction (expr_p, pre_p);
10453 break;
10455 case TRUTH_AND_EXPR:
10456 case TRUTH_OR_EXPR:
10457 case TRUTH_XOR_EXPR:
10459 tree orig_type = TREE_TYPE (*expr_p);
10460 tree new_type, xop0, xop1;
10461 *expr_p = gimple_boolify (*expr_p);
10462 new_type = TREE_TYPE (*expr_p);
10463 if (!useless_type_conversion_p (orig_type, new_type))
10465 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
10466 ret = GS_OK;
10467 break;
10470 /* Boolified binary truth expressions are semantically equivalent
10471 to bitwise binary expressions. Canonicalize them to the
10472 bitwise variant. */
10473 switch (TREE_CODE (*expr_p))
10475 case TRUTH_AND_EXPR:
10476 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
10477 break;
10478 case TRUTH_OR_EXPR:
10479 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
10480 break;
10481 case TRUTH_XOR_EXPR:
10482 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
10483 break;
10484 default:
10485 break;
10487 /* Now make sure that operands have compatible type to
10488 expression's new_type. */
10489 xop0 = TREE_OPERAND (*expr_p, 0);
10490 xop1 = TREE_OPERAND (*expr_p, 1);
10491 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
10492 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
10493 new_type,
10494 xop0);
10495 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
10496 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
10497 new_type,
10498 xop1);
10499 /* Continue classified as tcc_binary. */
10500 goto expr_2;
10503 case FMA_EXPR:
10504 case VEC_COND_EXPR:
10505 case VEC_PERM_EXPR:
10506 /* Classified as tcc_expression. */
10507 goto expr_3;
10509 case POINTER_PLUS_EXPR:
10511 enum gimplify_status r0, r1;
10512 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10513 post_p, is_gimple_val, fb_rvalue);
10514 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10515 post_p, is_gimple_val, fb_rvalue);
10516 recalculate_side_effects (*expr_p);
10517 ret = MIN (r0, r1);
10518 break;
10521 case CILK_SYNC_STMT:
10523 if (!fn_contains_cilk_spawn_p (cfun))
10525 error_at (EXPR_LOCATION (*expr_p),
10526 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
10527 ret = GS_ERROR;
10529 else
10531 gimplify_cilk_sync (expr_p, pre_p);
10532 ret = GS_ALL_DONE;
10534 break;
10537 default:
10538 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
10540 case tcc_comparison:
10541 /* Handle comparison of objects of non scalar mode aggregates
10542 with a call to memcmp. It would be nice to only have to do
10543 this for variable-sized objects, but then we'd have to allow
10544 the same nest of reference nodes we allow for MODIFY_EXPR and
10545 that's too complex.
10547 Compare scalar mode aggregates as scalar mode values. Using
10548 memcmp for them would be very inefficient at best, and is
10549 plain wrong if bitfields are involved. */
10551 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
10553 /* Vector comparisons need no boolification. */
10554 if (TREE_CODE (type) == VECTOR_TYPE)
10555 goto expr_2;
10556 else if (!AGGREGATE_TYPE_P (type))
10558 tree org_type = TREE_TYPE (*expr_p);
10559 *expr_p = gimple_boolify (*expr_p);
10560 if (!useless_type_conversion_p (org_type,
10561 TREE_TYPE (*expr_p)))
10563 *expr_p = fold_convert_loc (input_location,
10564 org_type, *expr_p);
10565 ret = GS_OK;
10567 else
10568 goto expr_2;
10570 else if (TYPE_MODE (type) != BLKmode)
10571 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
10572 else
10573 ret = gimplify_variable_sized_compare (expr_p);
10575 break;
10578 /* If *EXPR_P does not need to be special-cased, handle it
10579 according to its class. */
10580 case tcc_unary:
10581 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10582 post_p, is_gimple_val, fb_rvalue);
10583 break;
10585 case tcc_binary:
10586 expr_2:
10588 enum gimplify_status r0, r1;
10590 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10591 post_p, is_gimple_val, fb_rvalue);
10592 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10593 post_p, is_gimple_val, fb_rvalue);
10595 ret = MIN (r0, r1);
10596 break;
10599 expr_3:
10601 enum gimplify_status r0, r1, r2;
10603 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10604 post_p, is_gimple_val, fb_rvalue);
10605 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10606 post_p, is_gimple_val, fb_rvalue);
10607 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
10608 post_p, is_gimple_val, fb_rvalue);
10610 ret = MIN (MIN (r0, r1), r2);
10611 break;
10614 case tcc_declaration:
10615 case tcc_constant:
10616 ret = GS_ALL_DONE;
10617 goto dont_recalculate;
10619 default:
10620 gcc_unreachable ();
10623 recalculate_side_effects (*expr_p);
10625 dont_recalculate:
10626 break;
10629 gcc_assert (*expr_p || ret != GS_OK);
10631 while (ret == GS_OK);
10633 /* If we encountered an error_mark somewhere nested inside, either
10634 stub out the statement or propagate the error back out. */
10635 if (ret == GS_ERROR)
10637 if (is_statement)
10638 *expr_p = NULL;
10639 goto out;
10642 /* This was only valid as a return value from the langhook, which
10643 we handled. Make sure it doesn't escape from any other context. */
10644 gcc_assert (ret != GS_UNHANDLED);
10646 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
10648 /* We aren't looking for a value, and we don't have a valid
10649 statement. If it doesn't have side-effects, throw it away. */
10650 if (!TREE_SIDE_EFFECTS (*expr_p))
10651 *expr_p = NULL;
10652 else if (!TREE_THIS_VOLATILE (*expr_p))
10654 /* This is probably a _REF that contains something nested that
10655 has side effects. Recurse through the operands to find it. */
10656 enum tree_code code = TREE_CODE (*expr_p);
10658 switch (code)
10660 case COMPONENT_REF:
10661 case REALPART_EXPR:
10662 case IMAGPART_EXPR:
10663 case VIEW_CONVERT_EXPR:
10664 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10665 gimple_test_f, fallback);
10666 break;
10668 case ARRAY_REF:
10669 case ARRAY_RANGE_REF:
10670 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10671 gimple_test_f, fallback);
10672 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
10673 gimple_test_f, fallback);
10674 break;
10676 default:
10677 /* Anything else with side-effects must be converted to
10678 a valid statement before we get here. */
10679 gcc_unreachable ();
10682 *expr_p = NULL;
10684 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
10685 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
10687 /* Historically, the compiler has treated a bare reference
10688 to a non-BLKmode volatile lvalue as forcing a load. */
10689 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
10691 /* Normally, we do not want to create a temporary for a
10692 TREE_ADDRESSABLE type because such a type should not be
10693 copied by bitwise-assignment. However, we make an
10694 exception here, as all we are doing here is ensuring that
10695 we read the bytes that make up the type. We use
10696 create_tmp_var_raw because create_tmp_var will abort when
10697 given a TREE_ADDRESSABLE type. */
10698 tree tmp = create_tmp_var_raw (type, "vol");
10699 gimple_add_tmp_var (tmp);
10700 gimplify_assign (tmp, *expr_p, pre_p);
10701 *expr_p = NULL;
10703 else
10704 /* We can't do anything useful with a volatile reference to
10705 an incomplete type, so just throw it away. Likewise for
10706 a BLKmode type, since any implicit inner load should
10707 already have been turned into an explicit one by the
10708 gimplification process. */
10709 *expr_p = NULL;
10712 /* If we are gimplifying at the statement level, we're done. Tack
10713 everything together and return. */
10714 if (fallback == fb_none || is_statement)
10716 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
10717 it out for GC to reclaim it. */
10718 *expr_p = NULL_TREE;
10720 if (!gimple_seq_empty_p (internal_pre)
10721 || !gimple_seq_empty_p (internal_post))
10723 gimplify_seq_add_seq (&internal_pre, internal_post);
10724 gimplify_seq_add_seq (pre_p, internal_pre);
10727 /* The result of gimplifying *EXPR_P is going to be the last few
10728 statements in *PRE_P and *POST_P. Add location information
10729 to all the statements that were added by the gimplification
10730 helpers. */
10731 if (!gimple_seq_empty_p (*pre_p))
10732 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
10734 if (!gimple_seq_empty_p (*post_p))
10735 annotate_all_with_location_after (*post_p, post_last_gsi,
10736 input_location);
10738 goto out;
10741 #ifdef ENABLE_GIMPLE_CHECKING
10742 if (*expr_p)
10744 enum tree_code code = TREE_CODE (*expr_p);
10745 /* These expressions should already be in gimple IR form. */
10746 gcc_assert (code != MODIFY_EXPR
10747 && code != ASM_EXPR
10748 && code != BIND_EXPR
10749 && code != CATCH_EXPR
10750 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
10751 && code != EH_FILTER_EXPR
10752 && code != GOTO_EXPR
10753 && code != LABEL_EXPR
10754 && code != LOOP_EXPR
10755 && code != SWITCH_EXPR
10756 && code != TRY_FINALLY_EXPR
10757 && code != OACC_PARALLEL
10758 && code != OACC_KERNELS
10759 && code != OACC_DATA
10760 && code != OACC_HOST_DATA
10761 && code != OACC_DECLARE
10762 && code != OACC_UPDATE
10763 && code != OACC_ENTER_DATA
10764 && code != OACC_EXIT_DATA
10765 && code != OACC_CACHE
10766 && code != OMP_CRITICAL
10767 && code != OMP_FOR
10768 && code != OACC_LOOP
10769 && code != OMP_MASTER
10770 && code != OMP_TASKGROUP
10771 && code != OMP_ORDERED
10772 && code != OMP_PARALLEL
10773 && code != OMP_SECTIONS
10774 && code != OMP_SECTION
10775 && code != OMP_SINGLE);
10777 #endif
10779 /* Otherwise we're gimplifying a subexpression, so the resulting
10780 value is interesting. If it's a valid operand that matches
10781 GIMPLE_TEST_F, we're done. Unless we are handling some
10782 post-effects internally; if that's the case, we need to copy into
10783 a temporary before adding the post-effects to POST_P. */
10784 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
10785 goto out;
10787 /* Otherwise, we need to create a new temporary for the gimplified
10788 expression. */
10790 /* We can't return an lvalue if we have an internal postqueue. The
10791 object the lvalue refers to would (probably) be modified by the
10792 postqueue; we need to copy the value out first, which means an
10793 rvalue. */
10794 if ((fallback & fb_lvalue)
10795 && gimple_seq_empty_p (internal_post)
10796 && is_gimple_addressable (*expr_p))
10798 /* An lvalue will do. Take the address of the expression, store it
10799 in a temporary, and replace the expression with an INDIRECT_REF of
10800 that temporary. */
10801 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
10802 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
10803 *expr_p = build_simple_mem_ref (tmp);
10805 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
10807 /* An rvalue will do. Assign the gimplified expression into a
10808 new temporary TMP and replace the original expression with
10809 TMP. First, make sure that the expression has a type so that
10810 it can be assigned into a temporary. */
10811 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
10812 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
10814 else
10816 #ifdef ENABLE_GIMPLE_CHECKING
10817 if (!(fallback & fb_mayfail))
10819 fprintf (stderr, "gimplification failed:\n");
10820 print_generic_expr (stderr, *expr_p, 0);
10821 debug_tree (*expr_p);
10822 internal_error ("gimplification failed");
10824 #endif
10825 gcc_assert (fallback & fb_mayfail);
10827 /* If this is an asm statement, and the user asked for the
10828 impossible, don't die. Fail and let gimplify_asm_expr
10829 issue an error. */
10830 ret = GS_ERROR;
10831 goto out;
10834 /* Make sure the temporary matches our predicate. */
10835 gcc_assert ((*gimple_test_f) (*expr_p));
10837 if (!gimple_seq_empty_p (internal_post))
10839 annotate_all_with_location (internal_post, input_location);
10840 gimplify_seq_add_seq (pre_p, internal_post);
10843 out:
10844 input_location = saved_location;
10845 return ret;
10848 /* Look through TYPE for variable-sized objects and gimplify each such
10849 size that we find. Add to LIST_P any statements generated. */
10851 void
10852 gimplify_type_sizes (tree type, gimple_seq *list_p)
10854 tree field, t;
10856 if (type == NULL || type == error_mark_node)
10857 return;
10859 /* We first do the main variant, then copy into any other variants. */
10860 type = TYPE_MAIN_VARIANT (type);
10862 /* Avoid infinite recursion. */
10863 if (TYPE_SIZES_GIMPLIFIED (type))
10864 return;
10866 TYPE_SIZES_GIMPLIFIED (type) = 1;
10868 switch (TREE_CODE (type))
10870 case INTEGER_TYPE:
10871 case ENUMERAL_TYPE:
10872 case BOOLEAN_TYPE:
10873 case REAL_TYPE:
10874 case FIXED_POINT_TYPE:
10875 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
10876 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
10878 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
10880 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
10881 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
10883 break;
10885 case ARRAY_TYPE:
10886 /* These types may not have declarations, so handle them here. */
10887 gimplify_type_sizes (TREE_TYPE (type), list_p);
10888 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
10889 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
10890 with assigned stack slots, for -O1+ -g they should be tracked
10891 by VTA. */
10892 if (!(TYPE_NAME (type)
10893 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
10894 && DECL_IGNORED_P (TYPE_NAME (type)))
10895 && TYPE_DOMAIN (type)
10896 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
10898 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
10899 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
10900 DECL_IGNORED_P (t) = 0;
10901 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
10902 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
10903 DECL_IGNORED_P (t) = 0;
10905 break;
10907 case RECORD_TYPE:
10908 case UNION_TYPE:
10909 case QUAL_UNION_TYPE:
10910 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
10911 if (TREE_CODE (field) == FIELD_DECL)
10913 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
10914 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
10915 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
10916 gimplify_type_sizes (TREE_TYPE (field), list_p);
10918 break;
10920 case POINTER_TYPE:
10921 case REFERENCE_TYPE:
10922 /* We used to recurse on the pointed-to type here, which turned out to
10923 be incorrect because its definition might refer to variables not
10924 yet initialized at this point if a forward declaration is involved.
10926 It was actually useful for anonymous pointed-to types to ensure
10927 that the sizes evaluation dominates every possible later use of the
10928 values. Restricting to such types here would be safe since there
10929 is no possible forward declaration around, but would introduce an
10930 undesirable middle-end semantic to anonymity. We then defer to
10931 front-ends the responsibility of ensuring that the sizes are
10932 evaluated both early and late enough, e.g. by attaching artificial
10933 type declarations to the tree. */
10934 break;
10936 default:
10937 break;
10940 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
10941 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
10943 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
10945 TYPE_SIZE (t) = TYPE_SIZE (type);
10946 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
10947 TYPE_SIZES_GIMPLIFIED (t) = 1;
10951 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
10952 a size or position, has had all of its SAVE_EXPRs evaluated.
10953 We add any required statements to *STMT_P. */
10955 void
10956 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
10958 tree expr = *expr_p;
10960 /* We don't do anything if the value isn't there, is constant, or contains
10961 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
10962 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
10963 will want to replace it with a new variable, but that will cause problems
10964 if this type is from outside the function. It's OK to have that here. */
10965 if (is_gimple_sizepos (expr))
10966 return;
10968 *expr_p = unshare_expr (expr);
10970 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
10973 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
10974 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
10975 is true, also gimplify the parameters. */
10977 gbind *
10978 gimplify_body (tree fndecl, bool do_parms)
10980 location_t saved_location = input_location;
10981 gimple_seq parm_stmts, seq;
10982 gimple *outer_stmt;
10983 gbind *outer_bind;
10984 struct cgraph_node *cgn;
10986 timevar_push (TV_TREE_GIMPLIFY);
10988 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
10989 gimplification. */
10990 default_rtl_profile ();
10992 gcc_assert (gimplify_ctxp == NULL);
10993 push_gimplify_context ();
10995 if (flag_openacc || flag_openmp)
10997 gcc_assert (gimplify_omp_ctxp == NULL);
10998 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
10999 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
11002 /* Unshare most shared trees in the body and in that of any nested functions.
11003 It would seem we don't have to do this for nested functions because
11004 they are supposed to be output and then the outer function gimplified
11005 first, but the g++ front end doesn't always do it that way. */
11006 unshare_body (fndecl);
11007 unvisit_body (fndecl);
11009 cgn = cgraph_node::get (fndecl);
11010 if (cgn && cgn->origin)
11011 nonlocal_vlas = new hash_set<tree>;
11013 /* Make sure input_location isn't set to something weird. */
11014 input_location = DECL_SOURCE_LOCATION (fndecl);
11016 /* Resolve callee-copies. This has to be done before processing
11017 the body so that DECL_VALUE_EXPR gets processed correctly. */
11018 parm_stmts = do_parms ? gimplify_parameters () : NULL;
11020 /* Gimplify the function's body. */
11021 seq = NULL;
11022 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
11023 outer_stmt = gimple_seq_first_stmt (seq);
11024 if (!outer_stmt)
11026 outer_stmt = gimple_build_nop ();
11027 gimplify_seq_add_stmt (&seq, outer_stmt);
11030 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
11031 not the case, wrap everything in a GIMPLE_BIND to make it so. */
11032 if (gimple_code (outer_stmt) == GIMPLE_BIND
11033 && gimple_seq_first (seq) == gimple_seq_last (seq))
11034 outer_bind = as_a <gbind *> (outer_stmt);
11035 else
11036 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
11038 DECL_SAVED_TREE (fndecl) = NULL_TREE;
11040 /* If we had callee-copies statements, insert them at the beginning
11041 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
11042 if (!gimple_seq_empty_p (parm_stmts))
11044 tree parm;
11046 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
11047 gimple_bind_set_body (outer_bind, parm_stmts);
11049 for (parm = DECL_ARGUMENTS (current_function_decl);
11050 parm; parm = DECL_CHAIN (parm))
11051 if (DECL_HAS_VALUE_EXPR_P (parm))
11053 DECL_HAS_VALUE_EXPR_P (parm) = 0;
11054 DECL_IGNORED_P (parm) = 0;
11058 if (nonlocal_vlas)
11060 if (nonlocal_vla_vars)
11062 /* tree-nested.c may later on call declare_vars (..., true);
11063 which relies on BLOCK_VARS chain to be the tail of the
11064 gimple_bind_vars chain. Ensure we don't violate that
11065 assumption. */
11066 if (gimple_bind_block (outer_bind)
11067 == DECL_INITIAL (current_function_decl))
11068 declare_vars (nonlocal_vla_vars, outer_bind, true);
11069 else
11070 BLOCK_VARS (DECL_INITIAL (current_function_decl))
11071 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
11072 nonlocal_vla_vars);
11073 nonlocal_vla_vars = NULL_TREE;
11075 delete nonlocal_vlas;
11076 nonlocal_vlas = NULL;
11079 if ((flag_openacc || flag_openmp || flag_openmp_simd)
11080 && gimplify_omp_ctxp)
11082 delete_omp_context (gimplify_omp_ctxp);
11083 gimplify_omp_ctxp = NULL;
11086 pop_gimplify_context (outer_bind);
11087 gcc_assert (gimplify_ctxp == NULL);
11089 if (flag_checking && !seen_error ())
11090 verify_gimple_in_seq (gimple_bind_body (outer_bind));
11092 timevar_pop (TV_TREE_GIMPLIFY);
11093 input_location = saved_location;
11095 return outer_bind;
11098 typedef char *char_p; /* For DEF_VEC_P. */
11100 /* Return whether we should exclude FNDECL from instrumentation. */
11102 static bool
11103 flag_instrument_functions_exclude_p (tree fndecl)
11105 vec<char_p> *v;
11107 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
11108 if (v && v->length () > 0)
11110 const char *name;
11111 int i;
11112 char *s;
11114 name = lang_hooks.decl_printable_name (fndecl, 0);
11115 FOR_EACH_VEC_ELT (*v, i, s)
11116 if (strstr (name, s) != NULL)
11117 return true;
11120 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
11121 if (v && v->length () > 0)
11123 const char *name;
11124 int i;
11125 char *s;
11127 name = DECL_SOURCE_FILE (fndecl);
11128 FOR_EACH_VEC_ELT (*v, i, s)
11129 if (strstr (name, s) != NULL)
11130 return true;
11133 return false;
11136 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
11137 node for the function we want to gimplify.
11139 Return the sequence of GIMPLE statements corresponding to the body
11140 of FNDECL. */
11142 void
11143 gimplify_function_tree (tree fndecl)
11145 tree parm, ret;
11146 gimple_seq seq;
11147 gbind *bind;
11149 gcc_assert (!gimple_body (fndecl));
11151 if (DECL_STRUCT_FUNCTION (fndecl))
11152 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
11153 else
11154 push_struct_function (fndecl);
11156 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
11157 if necessary. */
11158 cfun->curr_properties |= PROP_gimple_lva;
11160 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
11162 /* Preliminarily mark non-addressed complex variables as eligible
11163 for promotion to gimple registers. We'll transform their uses
11164 as we find them. */
11165 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
11166 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
11167 && !TREE_THIS_VOLATILE (parm)
11168 && !needs_to_live_in_memory (parm))
11169 DECL_GIMPLE_REG_P (parm) = 1;
11172 ret = DECL_RESULT (fndecl);
11173 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
11174 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
11175 && !needs_to_live_in_memory (ret))
11176 DECL_GIMPLE_REG_P (ret) = 1;
11178 bind = gimplify_body (fndecl, true);
11180 /* The tree body of the function is no longer needed, replace it
11181 with the new GIMPLE body. */
11182 seq = NULL;
11183 gimple_seq_add_stmt (&seq, bind);
11184 gimple_set_body (fndecl, seq);
11186 /* If we're instrumenting function entry/exit, then prepend the call to
11187 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
11188 catch the exit hook. */
11189 /* ??? Add some way to ignore exceptions for this TFE. */
11190 if (flag_instrument_function_entry_exit
11191 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
11192 && !flag_instrument_functions_exclude_p (fndecl))
11194 tree x;
11195 gbind *new_bind;
11196 gimple *tf;
11197 gimple_seq cleanup = NULL, body = NULL;
11198 tree tmp_var;
11199 gcall *call;
11201 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
11202 call = gimple_build_call (x, 1, integer_zero_node);
11203 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
11204 gimple_call_set_lhs (call, tmp_var);
11205 gimplify_seq_add_stmt (&cleanup, call);
11206 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
11207 call = gimple_build_call (x, 2,
11208 build_fold_addr_expr (current_function_decl),
11209 tmp_var);
11210 gimplify_seq_add_stmt (&cleanup, call);
11211 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
11213 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
11214 call = gimple_build_call (x, 1, integer_zero_node);
11215 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
11216 gimple_call_set_lhs (call, tmp_var);
11217 gimplify_seq_add_stmt (&body, call);
11218 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
11219 call = gimple_build_call (x, 2,
11220 build_fold_addr_expr (current_function_decl),
11221 tmp_var);
11222 gimplify_seq_add_stmt (&body, call);
11223 gimplify_seq_add_stmt (&body, tf);
11224 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
11225 /* Clear the block for BIND, since it is no longer directly inside
11226 the function, but within a try block. */
11227 gimple_bind_set_block (bind, NULL);
11229 /* Replace the current function body with the body
11230 wrapped in the try/finally TF. */
11231 seq = NULL;
11232 gimple_seq_add_stmt (&seq, new_bind);
11233 gimple_set_body (fndecl, seq);
11234 bind = new_bind;
11237 if ((flag_sanitize & SANITIZE_THREAD) != 0
11238 && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
11240 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
11241 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
11242 gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
11243 /* Clear the block for BIND, since it is no longer directly inside
11244 the function, but within a try block. */
11245 gimple_bind_set_block (bind, NULL);
11246 /* Replace the current function body with the body
11247 wrapped in the try/finally TF. */
11248 seq = NULL;
11249 gimple_seq_add_stmt (&seq, new_bind);
11250 gimple_set_body (fndecl, seq);
11253 DECL_SAVED_TREE (fndecl) = NULL_TREE;
11254 cfun->curr_properties |= PROP_gimple_any;
11256 pop_cfun ();
11258 dump_function (TDI_generic, fndecl);
11261 /* Return a dummy expression of type TYPE in order to keep going after an
11262 error. */
11264 static tree
11265 dummy_object (tree type)
11267 tree t = build_int_cst (build_pointer_type (type), 0);
11268 return build2 (MEM_REF, type, t, t);
11271 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
11272 builtin function, but a very special sort of operator. */
11274 enum gimplify_status
11275 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
11276 gimple_seq *post_p ATTRIBUTE_UNUSED)
11278 tree promoted_type, have_va_type;
11279 tree valist = TREE_OPERAND (*expr_p, 0);
11280 tree type = TREE_TYPE (*expr_p);
11281 tree t, tag;
11282 location_t loc = EXPR_LOCATION (*expr_p);
11284 /* Verify that valist is of the proper type. */
11285 have_va_type = TREE_TYPE (valist);
11286 if (have_va_type == error_mark_node)
11287 return GS_ERROR;
11288 have_va_type = targetm.canonical_va_list_type (have_va_type);
11290 if (have_va_type == NULL_TREE)
11292 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
11293 return GS_ERROR;
11296 /* Generate a diagnostic for requesting data of a type that cannot
11297 be passed through `...' due to type promotion at the call site. */
11298 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
11299 != type)
11301 static bool gave_help;
11302 bool warned;
11304 /* Unfortunately, this is merely undefined, rather than a constraint
11305 violation, so we cannot make this an error. If this call is never
11306 executed, the program is still strictly conforming. */
11307 warned = warning_at (loc, 0,
11308 "%qT is promoted to %qT when passed through %<...%>",
11309 type, promoted_type);
11310 if (!gave_help && warned)
11312 gave_help = true;
11313 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
11314 promoted_type, type);
11317 /* We can, however, treat "undefined" any way we please.
11318 Call abort to encourage the user to fix the program. */
11319 if (warned)
11320 inform (loc, "if this code is reached, the program will abort");
11321 /* Before the abort, allow the evaluation of the va_list
11322 expression to exit or longjmp. */
11323 gimplify_and_add (valist, pre_p);
11324 t = build_call_expr_loc (loc,
11325 builtin_decl_implicit (BUILT_IN_TRAP), 0);
11326 gimplify_and_add (t, pre_p);
11328 /* This is dead code, but go ahead and finish so that the
11329 mode of the result comes out right. */
11330 *expr_p = dummy_object (type);
11331 return GS_ALL_DONE;
11334 tag = build_int_cst (build_pointer_type (type), 0);
11335 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 2, valist, tag);
11337 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
11338 needs to be expanded. */
11339 cfun->curr_properties &= ~PROP_gimple_lva;
11341 return GS_OK;
11344 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
11346 DST/SRC are the destination and source respectively. You can pass
11347 ungimplified trees in DST or SRC, in which case they will be
11348 converted to a gimple operand if necessary.
11350 This function returns the newly created GIMPLE_ASSIGN tuple. */
11352 gimple *
11353 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
11355 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11356 gimplify_and_add (t, seq_p);
11357 ggc_free (t);
11358 return gimple_seq_last_stmt (*seq_p);
11361 inline hashval_t
11362 gimplify_hasher::hash (const elt_t *p)
11364 tree t = p->val;
11365 return iterative_hash_expr (t, 0);
11368 inline bool
11369 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
11371 tree t1 = p1->val;
11372 tree t2 = p2->val;
11373 enum tree_code code = TREE_CODE (t1);
11375 if (TREE_CODE (t2) != code
11376 || TREE_TYPE (t1) != TREE_TYPE (t2))
11377 return false;
11379 if (!operand_equal_p (t1, t2, 0))
11380 return false;
11382 /* Only allow them to compare equal if they also hash equal; otherwise
11383 results are nondeterminate, and we fail bootstrap comparison. */
11384 gcc_checking_assert (hash (p1) == hash (p2));
11386 return true;