Simplify X * C1 == C2 with undefined overflow
[official-gcc.git] / gcc / omp-low.c
blob52c2cae394a9fa9434330a4379ad24c1945fed0d
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2020 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
63 /* Lowering of OMP parallel and workshare constructs proceeds in two
64 phases. The first phase scans the function looking for OMP statements
65 and then for variables that must be replaced to satisfy data sharing
66 clauses. The second phase expands code for the constructs, as well as
67 re-gimplifying things when variables have been replaced with complex
68 expressions.
70 Final code generation is done by pass_expand_omp. The flowgraph is
71 scanned for regions which are then moved to a new
72 function, to be invoked by the thread library, or offloaded. */
74 /* Context structure. Used to store information about each parallel
75 directive in the code. */
77 struct omp_context
79 /* This field must be at the beginning, as we do "inheritance": Some
80 callback functions for tree-inline.c (e.g., omp_copy_decl)
81 receive a copy_body_data pointer that is up-casted to an
82 omp_context pointer. */
83 copy_body_data cb;
85 /* The tree of contexts corresponding to the encountered constructs. */
86 struct omp_context *outer;
87 gimple *stmt;
89 /* Map variables to fields in a structure that allows communication
90 between sending and receiving threads. */
91 splay_tree field_map;
92 tree record_type;
93 tree sender_decl;
94 tree receiver_decl;
96 /* These are used just by task contexts, if task firstprivate fn is
97 needed. srecord_type is used to communicate from the thread
98 that encountered the task construct to task firstprivate fn,
99 record_type is allocated by GOMP_task, initialized by task firstprivate
100 fn and passed to the task body fn. */
101 splay_tree sfield_map;
102 tree srecord_type;
104 /* A chain of variables to add to the top-level block surrounding the
105 construct. In the case of a parallel, this is in the child function. */
106 tree block_vars;
108 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
109 barriers should jump to during omplower pass. */
110 tree cancel_label;
112 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
113 otherwise. */
114 gimple *simt_stmt;
116 /* For task reductions registered in this context, a vector containing
117 the length of the private copies block (if constant, otherwise NULL)
118 and then offsets (if constant, otherwise NULL) for each entry. */
119 vec<tree> task_reductions;
121 /* A hash map from the reduction clauses to the registered array
122 elts. */
123 hash_map<tree, unsigned> *task_reduction_map;
125 /* And a hash map from the lastprivate(conditional:) variables to their
126 corresponding tracking loop iteration variables. */
127 hash_map<tree, tree> *lastprivate_conditional_map;
129 /* A tree_list of the reduction clauses in this context. This is
130 only used for checking the consistency of OpenACC reduction
131 clauses in scan_omp_for and is not guaranteed to contain a valid
132 value outside of this function. */
133 tree local_reduction_clauses;
135 /* A tree_list of the reduction clauses in outer contexts. This is
136 only used for checking the consistency of OpenACC reduction
137 clauses in scan_omp_for and is not guaranteed to contain a valid
138 value outside of this function. */
139 tree outer_reduction_clauses;
141 /* Nesting depth of this context. Used to beautify error messages re
142 invalid gotos. The outermost ctx is depth 1, with depth 0 being
143 reserved for the main body of the function. */
144 int depth;
146 /* True if this parallel directive is nested within another. */
147 bool is_nested;
149 /* True if this construct can be cancelled. */
150 bool cancellable;
152 /* True if lower_omp_1 should look up lastprivate conditional in parent
153 context. */
154 bool combined_into_simd_safelen1;
156 /* True if there is nested scan context with inclusive clause. */
157 bool scan_inclusive;
159 /* True if there is nested scan context with exclusive clause. */
160 bool scan_exclusive;
162 /* True in the second simd loop of for simd with inscan reductions. */
163 bool for_simd_scan_phase;
165 /* True if there is order(concurrent) clause on the construct. */
166 bool order_concurrent;
168 /* True if there is bind clause on the construct (i.e. a loop construct). */
169 bool loop_p;
172 static splay_tree all_contexts;
173 static int taskreg_nesting_level;
174 static int target_nesting_level;
175 static bitmap task_shared_vars;
176 static bitmap global_nonaddressable_vars;
177 static vec<omp_context *> taskreg_contexts;
179 static void scan_omp (gimple_seq *, omp_context *);
180 static tree scan_omp_1_op (tree *, int *, void *);
182 #define WALK_SUBSTMTS \
183 case GIMPLE_BIND: \
184 case GIMPLE_TRY: \
185 case GIMPLE_CATCH: \
186 case GIMPLE_EH_FILTER: \
187 case GIMPLE_TRANSACTION: \
188 /* The sub-statements for these should be walked. */ \
189 *handled_ops_p = false; \
190 break;
192 /* Return true if CTX corresponds to an OpenACC 'parallel' or 'serial'
193 region. */
195 static bool
196 is_oacc_parallel_or_serial (omp_context *ctx)
198 enum gimple_code outer_type = gimple_code (ctx->stmt);
199 return ((outer_type == GIMPLE_OMP_TARGET)
200 && ((gimple_omp_target_kind (ctx->stmt)
201 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
202 || (gimple_omp_target_kind (ctx->stmt)
203 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
206 /* Return true if CTX corresponds to an oacc kernels region. */
208 static bool
209 is_oacc_kernels (omp_context *ctx)
211 enum gimple_code outer_type = gimple_code (ctx->stmt);
212 return ((outer_type == GIMPLE_OMP_TARGET)
213 && (gimple_omp_target_kind (ctx->stmt)
214 == GF_OMP_TARGET_KIND_OACC_KERNELS));
217 /* If DECL is the artificial dummy VAR_DECL created for non-static
218 data member privatization, return the underlying "this" parameter,
219 otherwise return NULL. */
221 tree
222 omp_member_access_dummy_var (tree decl)
224 if (!VAR_P (decl)
225 || !DECL_ARTIFICIAL (decl)
226 || !DECL_IGNORED_P (decl)
227 || !DECL_HAS_VALUE_EXPR_P (decl)
228 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
229 return NULL_TREE;
231 tree v = DECL_VALUE_EXPR (decl);
232 if (TREE_CODE (v) != COMPONENT_REF)
233 return NULL_TREE;
235 while (1)
236 switch (TREE_CODE (v))
238 case COMPONENT_REF:
239 case MEM_REF:
240 case INDIRECT_REF:
241 CASE_CONVERT:
242 case POINTER_PLUS_EXPR:
243 v = TREE_OPERAND (v, 0);
244 continue;
245 case PARM_DECL:
246 if (DECL_CONTEXT (v) == current_function_decl
247 && DECL_ARTIFICIAL (v)
248 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
249 return v;
250 return NULL_TREE;
251 default:
252 return NULL_TREE;
256 /* Helper for unshare_and_remap, called through walk_tree. */
258 static tree
259 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
261 tree *pair = (tree *) data;
262 if (*tp == pair[0])
264 *tp = unshare_expr (pair[1]);
265 *walk_subtrees = 0;
267 else if (IS_TYPE_OR_DECL_P (*tp))
268 *walk_subtrees = 0;
269 return NULL_TREE;
272 /* Return unshare_expr (X) with all occurrences of FROM
273 replaced with TO. */
275 static tree
276 unshare_and_remap (tree x, tree from, tree to)
278 tree pair[2] = { from, to };
279 x = unshare_expr (x);
280 walk_tree (&x, unshare_and_remap_1, pair, NULL);
281 return x;
284 /* Convenience function for calling scan_omp_1_op on tree operands. */
286 static inline tree
287 scan_omp_op (tree *tp, omp_context *ctx)
289 struct walk_stmt_info wi;
291 memset (&wi, 0, sizeof (wi));
292 wi.info = ctx;
293 wi.want_locations = true;
295 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
298 static void lower_omp (gimple_seq *, omp_context *);
299 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
300 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
302 /* Return true if CTX is for an omp parallel. */
304 static inline bool
305 is_parallel_ctx (omp_context *ctx)
307 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
311 /* Return true if CTX is for an omp task. */
313 static inline bool
314 is_task_ctx (omp_context *ctx)
316 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
320 /* Return true if CTX is for an omp taskloop. */
322 static inline bool
323 is_taskloop_ctx (omp_context *ctx)
325 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
326 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
330 /* Return true if CTX is for a host omp teams. */
332 static inline bool
333 is_host_teams_ctx (omp_context *ctx)
335 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
336 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
339 /* Return true if CTX is for an omp parallel or omp task or host omp teams
340 (the last one is strictly not a task region in OpenMP speak, but we
341 need to treat it similarly). */
343 static inline bool
344 is_taskreg_ctx (omp_context *ctx)
346 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
349 /* Return true if EXPR is variable sized. */
351 static inline bool
352 is_variable_sized (const_tree expr)
354 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
357 /* Lookup variables. The "maybe" form
358 allows for the variable form to not have been entered, otherwise we
359 assert that the variable must have been entered. */
361 static inline tree
362 lookup_decl (tree var, omp_context *ctx)
364 tree *n = ctx->cb.decl_map->get (var);
365 return *n;
368 static inline tree
369 maybe_lookup_decl (const_tree var, omp_context *ctx)
371 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
372 return n ? *n : NULL_TREE;
375 static inline tree
376 lookup_field (tree var, omp_context *ctx)
378 splay_tree_node n;
379 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
380 return (tree) n->value;
383 static inline tree
384 lookup_sfield (splay_tree_key key, omp_context *ctx)
386 splay_tree_node n;
387 n = splay_tree_lookup (ctx->sfield_map
388 ? ctx->sfield_map : ctx->field_map, key);
389 return (tree) n->value;
392 static inline tree
393 lookup_sfield (tree var, omp_context *ctx)
395 return lookup_sfield ((splay_tree_key) var, ctx);
398 static inline tree
399 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
401 splay_tree_node n;
402 n = splay_tree_lookup (ctx->field_map, key);
403 return n ? (tree) n->value : NULL_TREE;
406 static inline tree
407 maybe_lookup_field (tree var, omp_context *ctx)
409 return maybe_lookup_field ((splay_tree_key) var, ctx);
412 /* Return true if DECL should be copied by pointer. SHARED_CTX is
413 the parallel context if DECL is to be shared. */
415 static bool
416 use_pointer_for_field (tree decl, omp_context *shared_ctx)
418 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
419 || TYPE_ATOMIC (TREE_TYPE (decl)))
420 return true;
422 /* We can only use copy-in/copy-out semantics for shared variables
423 when we know the value is not accessible from an outer scope. */
424 if (shared_ctx)
426 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
428 /* ??? Trivially accessible from anywhere. But why would we even
429 be passing an address in this case? Should we simply assert
430 this to be false, or should we have a cleanup pass that removes
431 these from the list of mappings? */
432 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
433 return true;
435 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
436 without analyzing the expression whether or not its location
437 is accessible to anyone else. In the case of nested parallel
438 regions it certainly may be. */
439 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
440 return true;
442 /* Do not use copy-in/copy-out for variables that have their
443 address taken. */
444 if (is_global_var (decl))
446 /* For file scope vars, track whether we've seen them as
447 non-addressable initially and in that case, keep the same
448 answer for the duration of the pass, even when they are made
449 addressable later on e.g. through reduction expansion. Global
450 variables which weren't addressable before the pass will not
451 have their privatized copies address taken. See PR91216. */
452 if (!TREE_ADDRESSABLE (decl))
454 if (!global_nonaddressable_vars)
455 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
456 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
458 else if (!global_nonaddressable_vars
459 || !bitmap_bit_p (global_nonaddressable_vars,
460 DECL_UID (decl)))
461 return true;
463 else if (TREE_ADDRESSABLE (decl))
464 return true;
466 /* lower_send_shared_vars only uses copy-in, but not copy-out
467 for these. */
468 if (TREE_READONLY (decl)
469 || ((TREE_CODE (decl) == RESULT_DECL
470 || TREE_CODE (decl) == PARM_DECL)
471 && DECL_BY_REFERENCE (decl)))
472 return false;
474 /* Disallow copy-in/out in nested parallel if
475 decl is shared in outer parallel, otherwise
476 each thread could store the shared variable
477 in its own copy-in location, making the
478 variable no longer really shared. */
479 if (shared_ctx->is_nested)
481 omp_context *up;
483 for (up = shared_ctx->outer; up; up = up->outer)
484 if ((is_taskreg_ctx (up)
485 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
486 && is_gimple_omp_offloaded (up->stmt)))
487 && maybe_lookup_decl (decl, up))
488 break;
490 if (up)
492 tree c;
494 if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
496 for (c = gimple_omp_target_clauses (up->stmt);
497 c; c = OMP_CLAUSE_CHAIN (c))
498 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
499 && OMP_CLAUSE_DECL (c) == decl)
500 break;
502 else
503 for (c = gimple_omp_taskreg_clauses (up->stmt);
504 c; c = OMP_CLAUSE_CHAIN (c))
505 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
506 && OMP_CLAUSE_DECL (c) == decl)
507 break;
509 if (c)
510 goto maybe_mark_addressable_and_ret;
514 /* For tasks avoid using copy-in/out. As tasks can be
515 deferred or executed in different thread, when GOMP_task
516 returns, the task hasn't necessarily terminated. */
517 if (is_task_ctx (shared_ctx))
519 tree outer;
520 maybe_mark_addressable_and_ret:
521 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
522 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
524 /* Taking address of OUTER in lower_send_shared_vars
525 might need regimplification of everything that uses the
526 variable. */
527 if (!task_shared_vars)
528 task_shared_vars = BITMAP_ALLOC (NULL);
529 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
530 TREE_ADDRESSABLE (outer) = 1;
532 return true;
536 return false;
539 /* Construct a new automatic decl similar to VAR. */
541 static tree
542 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
544 tree copy = copy_var_decl (var, name, type);
546 DECL_CONTEXT (copy) = current_function_decl;
547 DECL_CHAIN (copy) = ctx->block_vars;
548 /* If VAR is listed in task_shared_vars, it means it wasn't
549 originally addressable and is just because task needs to take
550 it's address. But we don't need to take address of privatizations
551 from that var. */
552 if (TREE_ADDRESSABLE (var)
553 && ((task_shared_vars
554 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
555 || (global_nonaddressable_vars
556 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
557 TREE_ADDRESSABLE (copy) = 0;
558 ctx->block_vars = copy;
560 return copy;
563 static tree
564 omp_copy_decl_1 (tree var, omp_context *ctx)
566 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
569 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
570 as appropriate. */
571 static tree
572 omp_build_component_ref (tree obj, tree field)
574 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
575 if (TREE_THIS_VOLATILE (field))
576 TREE_THIS_VOLATILE (ret) |= 1;
577 if (TREE_READONLY (field))
578 TREE_READONLY (ret) |= 1;
579 return ret;
582 /* Build tree nodes to access the field for VAR on the receiver side. */
584 static tree
585 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
587 tree x, field = lookup_field (var, ctx);
589 /* If the receiver record type was remapped in the child function,
590 remap the field into the new record type. */
591 x = maybe_lookup_field (field, ctx);
592 if (x != NULL)
593 field = x;
595 x = build_simple_mem_ref (ctx->receiver_decl);
596 TREE_THIS_NOTRAP (x) = 1;
597 x = omp_build_component_ref (x, field);
598 if (by_ref)
600 x = build_simple_mem_ref (x);
601 TREE_THIS_NOTRAP (x) = 1;
604 return x;
607 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
608 of a parallel, this is a component reference; for workshare constructs
609 this is some variable. */
611 static tree
612 build_outer_var_ref (tree var, omp_context *ctx,
613 enum omp_clause_code code = OMP_CLAUSE_ERROR)
615 tree x;
616 omp_context *outer = ctx->outer;
617 while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
618 outer = outer->outer;
620 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
621 x = var;
622 else if (is_variable_sized (var))
624 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
625 x = build_outer_var_ref (x, ctx, code);
626 x = build_simple_mem_ref (x);
628 else if (is_taskreg_ctx (ctx))
630 bool by_ref = use_pointer_for_field (var, NULL);
631 x = build_receiver_ref (var, by_ref, ctx);
633 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
634 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
635 || ctx->loop_p
636 || (code == OMP_CLAUSE_PRIVATE
637 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
638 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
639 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
641 /* #pragma omp simd isn't a worksharing construct, and can reference
642 even private vars in its linear etc. clauses.
643 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
644 to private vars in all worksharing constructs. */
645 x = NULL_TREE;
646 if (outer && is_taskreg_ctx (outer))
647 x = lookup_decl (var, outer);
648 else if (outer)
649 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
650 if (x == NULL_TREE)
651 x = var;
653 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
655 gcc_assert (outer);
656 splay_tree_node n
657 = splay_tree_lookup (outer->field_map,
658 (splay_tree_key) &DECL_UID (var));
659 if (n == NULL)
661 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
662 x = var;
663 else
664 x = lookup_decl (var, outer);
666 else
668 tree field = (tree) n->value;
669 /* If the receiver record type was remapped in the child function,
670 remap the field into the new record type. */
671 x = maybe_lookup_field (field, outer);
672 if (x != NULL)
673 field = x;
675 x = build_simple_mem_ref (outer->receiver_decl);
676 x = omp_build_component_ref (x, field);
677 if (use_pointer_for_field (var, outer))
678 x = build_simple_mem_ref (x);
681 else if (outer)
682 x = lookup_decl (var, outer);
683 else if (omp_is_reference (var))
684 /* This can happen with orphaned constructs. If var is reference, it is
685 possible it is shared and as such valid. */
686 x = var;
687 else if (omp_member_access_dummy_var (var))
688 x = var;
689 else
690 gcc_unreachable ();
692 if (x == var)
694 tree t = omp_member_access_dummy_var (var);
695 if (t)
697 x = DECL_VALUE_EXPR (var);
698 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
699 if (o != t)
700 x = unshare_and_remap (x, t, o);
701 else
702 x = unshare_expr (x);
706 if (omp_is_reference (var))
707 x = build_simple_mem_ref (x);
709 return x;
712 /* Build tree nodes to access the field for VAR on the sender side. */
714 static tree
715 build_sender_ref (splay_tree_key key, omp_context *ctx)
717 tree field = lookup_sfield (key, ctx);
718 return omp_build_component_ref (ctx->sender_decl, field);
721 static tree
722 build_sender_ref (tree var, omp_context *ctx)
724 return build_sender_ref ((splay_tree_key) var, ctx);
727 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
728 BASE_POINTERS_RESTRICT, declare the field with restrict. */
730 static void
731 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
733 tree field, type, sfield = NULL_TREE;
734 splay_tree_key key = (splay_tree_key) var;
736 if ((mask & 16) != 0)
738 key = (splay_tree_key) &DECL_NAME (var);
739 gcc_checking_assert (key != (splay_tree_key) var);
741 if ((mask & 8) != 0)
743 key = (splay_tree_key) &DECL_UID (var);
744 gcc_checking_assert (key != (splay_tree_key) var);
746 gcc_assert ((mask & 1) == 0
747 || !splay_tree_lookup (ctx->field_map, key));
748 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
749 || !splay_tree_lookup (ctx->sfield_map, key));
750 gcc_assert ((mask & 3) == 3
751 || !is_gimple_omp_oacc (ctx->stmt));
753 type = TREE_TYPE (var);
754 if ((mask & 16) != 0)
755 type = lang_hooks.decls.omp_array_data (var, true);
757 /* Prevent redeclaring the var in the split-off function with a restrict
758 pointer type. Note that we only clear type itself, restrict qualifiers in
759 the pointed-to type will be ignored by points-to analysis. */
760 if (POINTER_TYPE_P (type)
761 && TYPE_RESTRICT (type))
762 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
764 if (mask & 4)
766 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
767 type = build_pointer_type (build_pointer_type (type));
769 else if (by_ref)
770 type = build_pointer_type (type);
771 else if ((mask & 3) == 1 && omp_is_reference (var))
772 type = TREE_TYPE (type);
774 field = build_decl (DECL_SOURCE_LOCATION (var),
775 FIELD_DECL, DECL_NAME (var), type);
777 /* Remember what variable this field was created for. This does have a
778 side effect of making dwarf2out ignore this member, so for helpful
779 debugging we clear it later in delete_omp_context. */
780 DECL_ABSTRACT_ORIGIN (field) = var;
781 if ((mask & 16) == 0 && type == TREE_TYPE (var))
783 SET_DECL_ALIGN (field, DECL_ALIGN (var));
784 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
785 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
787 else
788 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
790 if ((mask & 3) == 3)
792 insert_field_into_struct (ctx->record_type, field);
793 if (ctx->srecord_type)
795 sfield = build_decl (DECL_SOURCE_LOCATION (var),
796 FIELD_DECL, DECL_NAME (var), type);
797 DECL_ABSTRACT_ORIGIN (sfield) = var;
798 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
799 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
800 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
801 insert_field_into_struct (ctx->srecord_type, sfield);
804 else
806 if (ctx->srecord_type == NULL_TREE)
808 tree t;
810 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
811 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
812 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
814 sfield = build_decl (DECL_SOURCE_LOCATION (t),
815 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
816 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
817 insert_field_into_struct (ctx->srecord_type, sfield);
818 splay_tree_insert (ctx->sfield_map,
819 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
820 (splay_tree_value) sfield);
823 sfield = field;
824 insert_field_into_struct ((mask & 1) ? ctx->record_type
825 : ctx->srecord_type, field);
828 if (mask & 1)
829 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
830 if ((mask & 2) && ctx->sfield_map)
831 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
834 static tree
835 install_var_local (tree var, omp_context *ctx)
837 tree new_var = omp_copy_decl_1 (var, ctx);
838 insert_decl_map (&ctx->cb, var, new_var);
839 return new_var;
842 /* Adjust the replacement for DECL in CTX for the new context. This means
843 copying the DECL_VALUE_EXPR, and fixing up the type. */
845 static void
846 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
848 tree new_decl, size;
850 new_decl = lookup_decl (decl, ctx);
852 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
854 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
855 && DECL_HAS_VALUE_EXPR_P (decl))
857 tree ve = DECL_VALUE_EXPR (decl);
858 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
859 SET_DECL_VALUE_EXPR (new_decl, ve);
860 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
863 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
865 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
866 if (size == error_mark_node)
867 size = TYPE_SIZE (TREE_TYPE (new_decl));
868 DECL_SIZE (new_decl) = size;
870 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
871 if (size == error_mark_node)
872 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
873 DECL_SIZE_UNIT (new_decl) = size;
877 /* The callback for remap_decl. Search all containing contexts for a
878 mapping of the variable; this avoids having to duplicate the splay
879 tree ahead of time. We know a mapping doesn't already exist in the
880 given context. Create new mappings to implement default semantics. */
882 static tree
883 omp_copy_decl (tree var, copy_body_data *cb)
885 omp_context *ctx = (omp_context *) cb;
886 tree new_var;
888 if (TREE_CODE (var) == LABEL_DECL)
890 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
891 return var;
892 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
893 DECL_CONTEXT (new_var) = current_function_decl;
894 insert_decl_map (&ctx->cb, var, new_var);
895 return new_var;
898 while (!is_taskreg_ctx (ctx))
900 ctx = ctx->outer;
901 if (ctx == NULL)
902 return var;
903 new_var = maybe_lookup_decl (var, ctx);
904 if (new_var)
905 return new_var;
908 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
909 return var;
911 return error_mark_node;
914 /* Create a new context, with OUTER_CTX being the surrounding context. */
916 static omp_context *
917 new_omp_context (gimple *stmt, omp_context *outer_ctx)
919 omp_context *ctx = XCNEW (omp_context);
921 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
922 (splay_tree_value) ctx);
923 ctx->stmt = stmt;
925 if (outer_ctx)
927 ctx->outer = outer_ctx;
928 ctx->cb = outer_ctx->cb;
929 ctx->cb.block = NULL;
930 ctx->depth = outer_ctx->depth + 1;
932 else
934 ctx->cb.src_fn = current_function_decl;
935 ctx->cb.dst_fn = current_function_decl;
936 ctx->cb.src_node = cgraph_node::get (current_function_decl);
937 gcc_checking_assert (ctx->cb.src_node);
938 ctx->cb.dst_node = ctx->cb.src_node;
939 ctx->cb.src_cfun = cfun;
940 ctx->cb.copy_decl = omp_copy_decl;
941 ctx->cb.eh_lp_nr = 0;
942 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
943 ctx->cb.adjust_array_error_bounds = true;
944 ctx->cb.dont_remap_vla_if_no_change = true;
945 ctx->depth = 1;
948 ctx->cb.decl_map = new hash_map<tree, tree>;
950 return ctx;
953 static gimple_seq maybe_catch_exception (gimple_seq);
955 /* Finalize task copyfn. */
957 static void
958 finalize_task_copyfn (gomp_task *task_stmt)
960 struct function *child_cfun;
961 tree child_fn;
962 gimple_seq seq = NULL, new_seq;
963 gbind *bind;
965 child_fn = gimple_omp_task_copy_fn (task_stmt);
966 if (child_fn == NULL_TREE)
967 return;
969 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
970 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
972 push_cfun (child_cfun);
973 bind = gimplify_body (child_fn, false);
974 gimple_seq_add_stmt (&seq, bind);
975 new_seq = maybe_catch_exception (seq);
976 if (new_seq != seq)
978 bind = gimple_build_bind (NULL, new_seq, NULL);
979 seq = NULL;
980 gimple_seq_add_stmt (&seq, bind);
982 gimple_set_body (child_fn, seq);
983 pop_cfun ();
985 /* Inform the callgraph about the new function. */
986 cgraph_node *node = cgraph_node::get_create (child_fn);
987 node->parallelized_function = 1;
988 cgraph_node::add_new_function (child_fn, false);
991 /* Destroy a omp_context data structures. Called through the splay tree
992 value delete callback. */
994 static void
995 delete_omp_context (splay_tree_value value)
997 omp_context *ctx = (omp_context *) value;
999 delete ctx->cb.decl_map;
1001 if (ctx->field_map)
1002 splay_tree_delete (ctx->field_map);
1003 if (ctx->sfield_map)
1004 splay_tree_delete (ctx->sfield_map);
1006 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1007 it produces corrupt debug information. */
1008 if (ctx->record_type)
1010 tree t;
1011 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1012 DECL_ABSTRACT_ORIGIN (t) = NULL;
1014 if (ctx->srecord_type)
1016 tree t;
1017 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1018 DECL_ABSTRACT_ORIGIN (t) = NULL;
1021 if (is_task_ctx (ctx))
1022 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
1024 if (ctx->task_reduction_map)
1026 ctx->task_reductions.release ();
1027 delete ctx->task_reduction_map;
1030 delete ctx->lastprivate_conditional_map;
1032 XDELETE (ctx);
1035 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1036 context. */
1038 static void
1039 fixup_child_record_type (omp_context *ctx)
1041 tree f, type = ctx->record_type;
1043 if (!ctx->receiver_decl)
1044 return;
1045 /* ??? It isn't sufficient to just call remap_type here, because
1046 variably_modified_type_p doesn't work the way we expect for
1047 record types. Testing each field for whether it needs remapping
1048 and creating a new record by hand works, however. */
1049 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1050 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1051 break;
1052 if (f)
1054 tree name, new_fields = NULL;
1056 type = lang_hooks.types.make_type (RECORD_TYPE);
1057 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1058 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1059 TYPE_DECL, name, type);
1060 TYPE_NAME (type) = name;
1062 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1064 tree new_f = copy_node (f);
1065 DECL_CONTEXT (new_f) = type;
1066 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1067 DECL_CHAIN (new_f) = new_fields;
1068 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1069 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1070 &ctx->cb, NULL);
1071 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1072 &ctx->cb, NULL);
1073 new_fields = new_f;
1075 /* Arrange to be able to look up the receiver field
1076 given the sender field. */
1077 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1078 (splay_tree_value) new_f);
1080 TYPE_FIELDS (type) = nreverse (new_fields);
1081 layout_type (type);
1084 /* In a target region we never modify any of the pointers in *.omp_data_i,
1085 so attempt to help the optimizers. */
1086 if (is_gimple_omp_offloaded (ctx->stmt))
1087 type = build_qualified_type (type, TYPE_QUAL_CONST);
1089 TREE_TYPE (ctx->receiver_decl)
1090 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1093 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1094 specified by CLAUSES. */
1096 static void
1097 scan_sharing_clauses (tree clauses, omp_context *ctx)
1099 tree c, decl;
1100 bool scan_array_reductions = false;
1102 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1104 bool by_ref;
1106 switch (OMP_CLAUSE_CODE (c))
1108 case OMP_CLAUSE_PRIVATE:
1109 decl = OMP_CLAUSE_DECL (c);
1110 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1111 goto do_private;
1112 else if (!is_variable_sized (decl))
1113 install_var_local (decl, ctx);
1114 break;
1116 case OMP_CLAUSE_SHARED:
1117 decl = OMP_CLAUSE_DECL (c);
1118 /* Ignore shared directives in teams construct inside of
1119 target construct. */
1120 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1121 && !is_host_teams_ctx (ctx))
1123 /* Global variables don't need to be copied,
1124 the receiver side will use them directly. */
1125 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1126 if (is_global_var (odecl))
1127 break;
1128 insert_decl_map (&ctx->cb, decl, odecl);
1129 break;
1131 gcc_assert (is_taskreg_ctx (ctx));
1132 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1133 || !is_variable_sized (decl));
1134 /* Global variables don't need to be copied,
1135 the receiver side will use them directly. */
1136 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1137 break;
1138 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1140 use_pointer_for_field (decl, ctx);
1141 break;
1143 by_ref = use_pointer_for_field (decl, NULL);
1144 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1145 || TREE_ADDRESSABLE (decl)
1146 || by_ref
1147 || omp_is_reference (decl))
1149 by_ref = use_pointer_for_field (decl, ctx);
1150 install_var_field (decl, by_ref, 3, ctx);
1151 install_var_local (decl, ctx);
1152 break;
1154 /* We don't need to copy const scalar vars back. */
1155 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1156 goto do_private;
1158 case OMP_CLAUSE_REDUCTION:
1159 if (is_oacc_parallel_or_serial (ctx) || is_oacc_kernels (ctx))
1160 ctx->local_reduction_clauses
1161 = tree_cons (NULL, c, ctx->local_reduction_clauses);
1162 /* FALLTHRU */
1164 case OMP_CLAUSE_IN_REDUCTION:
1165 decl = OMP_CLAUSE_DECL (c);
1166 if (TREE_CODE (decl) == MEM_REF)
1168 tree t = TREE_OPERAND (decl, 0);
1169 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1170 t = TREE_OPERAND (t, 0);
1171 if (TREE_CODE (t) == INDIRECT_REF
1172 || TREE_CODE (t) == ADDR_EXPR)
1173 t = TREE_OPERAND (t, 0);
1174 install_var_local (t, ctx);
1175 if (is_taskreg_ctx (ctx)
1176 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1177 || (is_task_ctx (ctx)
1178 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1179 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1180 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1181 == POINTER_TYPE)))))
1182 && !is_variable_sized (t)
1183 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1184 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1185 && !is_task_ctx (ctx))))
1187 by_ref = use_pointer_for_field (t, NULL);
1188 if (is_task_ctx (ctx)
1189 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1190 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1192 install_var_field (t, false, 1, ctx);
1193 install_var_field (t, by_ref, 2, ctx);
1195 else
1196 install_var_field (t, by_ref, 3, ctx);
1198 break;
1200 if (is_task_ctx (ctx)
1201 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1202 && OMP_CLAUSE_REDUCTION_TASK (c)
1203 && is_parallel_ctx (ctx)))
1205 /* Global variables don't need to be copied,
1206 the receiver side will use them directly. */
1207 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1209 by_ref = use_pointer_for_field (decl, ctx);
1210 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1211 install_var_field (decl, by_ref, 3, ctx);
1213 install_var_local (decl, ctx);
1214 break;
1216 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1217 && OMP_CLAUSE_REDUCTION_TASK (c))
1219 install_var_local (decl, ctx);
1220 break;
1222 goto do_private;
1224 case OMP_CLAUSE_LASTPRIVATE:
1225 /* Let the corresponding firstprivate clause create
1226 the variable. */
1227 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1228 break;
1229 /* FALLTHRU */
1231 case OMP_CLAUSE_FIRSTPRIVATE:
1232 case OMP_CLAUSE_LINEAR:
1233 decl = OMP_CLAUSE_DECL (c);
1234 do_private:
1235 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1236 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1237 && is_gimple_omp_offloaded (ctx->stmt))
1239 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1240 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1241 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1242 install_var_field (decl, true, 3, ctx);
1243 else
1244 install_var_field (decl, false, 3, ctx);
1246 if (is_variable_sized (decl))
1248 if (is_task_ctx (ctx))
1249 install_var_field (decl, false, 1, ctx);
1250 break;
1252 else if (is_taskreg_ctx (ctx))
1254 bool global
1255 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1256 by_ref = use_pointer_for_field (decl, NULL);
1258 if (is_task_ctx (ctx)
1259 && (global || by_ref || omp_is_reference (decl)))
1261 install_var_field (decl, false, 1, ctx);
1262 if (!global)
1263 install_var_field (decl, by_ref, 2, ctx);
1265 else if (!global)
1266 install_var_field (decl, by_ref, 3, ctx);
1268 install_var_local (decl, ctx);
1269 break;
1271 case OMP_CLAUSE_USE_DEVICE_PTR:
1272 case OMP_CLAUSE_USE_DEVICE_ADDR:
1273 decl = OMP_CLAUSE_DECL (c);
1275 /* Fortran array descriptors. */
1276 if (lang_hooks.decls.omp_array_data (decl, true))
1277 install_var_field (decl, false, 19, ctx);
1278 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1279 && !omp_is_reference (decl)
1280 && !omp_is_allocatable_or_ptr (decl))
1281 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1282 install_var_field (decl, true, 11, ctx);
1283 else
1284 install_var_field (decl, false, 11, ctx);
1285 if (DECL_SIZE (decl)
1286 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1288 tree decl2 = DECL_VALUE_EXPR (decl);
1289 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1290 decl2 = TREE_OPERAND (decl2, 0);
1291 gcc_assert (DECL_P (decl2));
1292 install_var_local (decl2, ctx);
1294 install_var_local (decl, ctx);
1295 break;
1297 case OMP_CLAUSE_IS_DEVICE_PTR:
1298 decl = OMP_CLAUSE_DECL (c);
1299 goto do_private;
1301 case OMP_CLAUSE__LOOPTEMP_:
1302 case OMP_CLAUSE__REDUCTEMP_:
1303 gcc_assert (is_taskreg_ctx (ctx));
1304 decl = OMP_CLAUSE_DECL (c);
1305 install_var_field (decl, false, 3, ctx);
1306 install_var_local (decl, ctx);
1307 break;
1309 case OMP_CLAUSE_COPYPRIVATE:
1310 case OMP_CLAUSE_COPYIN:
1311 decl = OMP_CLAUSE_DECL (c);
1312 by_ref = use_pointer_for_field (decl, NULL);
1313 install_var_field (decl, by_ref, 3, ctx);
1314 break;
1316 case OMP_CLAUSE_FINAL:
1317 case OMP_CLAUSE_IF:
1318 case OMP_CLAUSE_NUM_THREADS:
1319 case OMP_CLAUSE_NUM_TEAMS:
1320 case OMP_CLAUSE_THREAD_LIMIT:
1321 case OMP_CLAUSE_DEVICE:
1322 case OMP_CLAUSE_SCHEDULE:
1323 case OMP_CLAUSE_DIST_SCHEDULE:
1324 case OMP_CLAUSE_DEPEND:
1325 case OMP_CLAUSE_PRIORITY:
1326 case OMP_CLAUSE_GRAINSIZE:
1327 case OMP_CLAUSE_NUM_TASKS:
1328 case OMP_CLAUSE_NUM_GANGS:
1329 case OMP_CLAUSE_NUM_WORKERS:
1330 case OMP_CLAUSE_VECTOR_LENGTH:
1331 if (ctx->outer)
1332 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1333 break;
1335 case OMP_CLAUSE_TO:
1336 case OMP_CLAUSE_FROM:
1337 case OMP_CLAUSE_MAP:
1338 if (ctx->outer)
1339 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1340 decl = OMP_CLAUSE_DECL (c);
1341 /* Global variables with "omp declare target" attribute
1342 don't need to be copied, the receiver side will use them
1343 directly. However, global variables with "omp declare target link"
1344 attribute need to be copied. Or when ALWAYS modifier is used. */
1345 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1346 && DECL_P (decl)
1347 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1348 && (OMP_CLAUSE_MAP_KIND (c)
1349 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1350 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1351 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1352 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1353 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1354 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1355 && varpool_node::get_create (decl)->offloadable
1356 && !lookup_attribute ("omp declare target link",
1357 DECL_ATTRIBUTES (decl)))
1358 break;
1359 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1360 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1362 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1363 not offloaded; there is nothing to map for those. */
1364 if (!is_gimple_omp_offloaded (ctx->stmt)
1365 && !POINTER_TYPE_P (TREE_TYPE (decl))
1366 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1367 break;
1369 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1370 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1371 || (OMP_CLAUSE_MAP_KIND (c)
1372 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1374 if (TREE_CODE (decl) == COMPONENT_REF
1375 || (TREE_CODE (decl) == INDIRECT_REF
1376 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1377 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1378 == REFERENCE_TYPE)))
1379 break;
1380 if (DECL_SIZE (decl)
1381 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1383 tree decl2 = DECL_VALUE_EXPR (decl);
1384 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1385 decl2 = TREE_OPERAND (decl2, 0);
1386 gcc_assert (DECL_P (decl2));
1387 install_var_local (decl2, ctx);
1389 install_var_local (decl, ctx);
1390 break;
1392 if (DECL_P (decl))
1394 if (DECL_SIZE (decl)
1395 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1397 tree decl2 = DECL_VALUE_EXPR (decl);
1398 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1399 decl2 = TREE_OPERAND (decl2, 0);
1400 gcc_assert (DECL_P (decl2));
1401 install_var_field (decl2, true, 3, ctx);
1402 install_var_local (decl2, ctx);
1403 install_var_local (decl, ctx);
1405 else
1407 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1408 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1409 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1410 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1411 install_var_field (decl, true, 7, ctx);
1412 else
1413 install_var_field (decl, true, 3, ctx);
1414 if (is_gimple_omp_offloaded (ctx->stmt)
1415 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1416 install_var_local (decl, ctx);
1419 else
1421 tree base = get_base_address (decl);
1422 tree nc = OMP_CLAUSE_CHAIN (c);
1423 if (DECL_P (base)
1424 && nc != NULL_TREE
1425 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1426 && OMP_CLAUSE_DECL (nc) == base
1427 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1428 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1430 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1431 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1433 else
1435 if (ctx->outer)
1437 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1438 decl = OMP_CLAUSE_DECL (c);
1440 gcc_assert (!splay_tree_lookup (ctx->field_map,
1441 (splay_tree_key) decl));
1442 tree field
1443 = build_decl (OMP_CLAUSE_LOCATION (c),
1444 FIELD_DECL, NULL_TREE, ptr_type_node);
1445 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1446 insert_field_into_struct (ctx->record_type, field);
1447 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1448 (splay_tree_value) field);
1451 break;
1453 case OMP_CLAUSE_ORDER:
1454 ctx->order_concurrent = true;
1455 break;
1457 case OMP_CLAUSE_BIND:
1458 ctx->loop_p = true;
1459 break;
1461 case OMP_CLAUSE_NOWAIT:
1462 case OMP_CLAUSE_ORDERED:
1463 case OMP_CLAUSE_COLLAPSE:
1464 case OMP_CLAUSE_UNTIED:
1465 case OMP_CLAUSE_MERGEABLE:
1466 case OMP_CLAUSE_PROC_BIND:
1467 case OMP_CLAUSE_SAFELEN:
1468 case OMP_CLAUSE_SIMDLEN:
1469 case OMP_CLAUSE_THREADS:
1470 case OMP_CLAUSE_SIMD:
1471 case OMP_CLAUSE_NOGROUP:
1472 case OMP_CLAUSE_DEFAULTMAP:
1473 case OMP_CLAUSE_ASYNC:
1474 case OMP_CLAUSE_WAIT:
1475 case OMP_CLAUSE_GANG:
1476 case OMP_CLAUSE_WORKER:
1477 case OMP_CLAUSE_VECTOR:
1478 case OMP_CLAUSE_INDEPENDENT:
1479 case OMP_CLAUSE_AUTO:
1480 case OMP_CLAUSE_SEQ:
1481 case OMP_CLAUSE_TILE:
1482 case OMP_CLAUSE__SIMT_:
1483 case OMP_CLAUSE_DEFAULT:
1484 case OMP_CLAUSE_NONTEMPORAL:
1485 case OMP_CLAUSE_IF_PRESENT:
1486 case OMP_CLAUSE_FINALIZE:
1487 case OMP_CLAUSE_TASK_REDUCTION:
1488 break;
1490 case OMP_CLAUSE_ALIGNED:
1491 decl = OMP_CLAUSE_DECL (c);
1492 if (is_global_var (decl)
1493 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1494 install_var_local (decl, ctx);
1495 break;
1497 case OMP_CLAUSE__CONDTEMP_:
1498 decl = OMP_CLAUSE_DECL (c);
1499 if (is_parallel_ctx (ctx))
1501 install_var_field (decl, false, 3, ctx);
1502 install_var_local (decl, ctx);
1504 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1505 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1506 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1507 install_var_local (decl, ctx);
1508 break;
1510 case OMP_CLAUSE__CACHE_:
1511 default:
1512 gcc_unreachable ();
1516 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1518 switch (OMP_CLAUSE_CODE (c))
1520 case OMP_CLAUSE_LASTPRIVATE:
1521 /* Let the corresponding firstprivate clause create
1522 the variable. */
1523 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1524 scan_array_reductions = true;
1525 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1526 break;
1527 /* FALLTHRU */
1529 case OMP_CLAUSE_FIRSTPRIVATE:
1530 case OMP_CLAUSE_PRIVATE:
1531 case OMP_CLAUSE_LINEAR:
1532 case OMP_CLAUSE_IS_DEVICE_PTR:
1533 decl = OMP_CLAUSE_DECL (c);
1534 if (is_variable_sized (decl))
1536 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1537 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1538 && is_gimple_omp_offloaded (ctx->stmt))
1540 tree decl2 = DECL_VALUE_EXPR (decl);
1541 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1542 decl2 = TREE_OPERAND (decl2, 0);
1543 gcc_assert (DECL_P (decl2));
1544 install_var_local (decl2, ctx);
1545 fixup_remapped_decl (decl2, ctx, false);
1547 install_var_local (decl, ctx);
1549 fixup_remapped_decl (decl, ctx,
1550 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1551 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1552 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1553 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1554 scan_array_reductions = true;
1555 break;
1557 case OMP_CLAUSE_REDUCTION:
1558 case OMP_CLAUSE_IN_REDUCTION:
1559 decl = OMP_CLAUSE_DECL (c);
1560 if (TREE_CODE (decl) != MEM_REF)
1562 if (is_variable_sized (decl))
1563 install_var_local (decl, ctx);
1564 fixup_remapped_decl (decl, ctx, false);
1566 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1567 scan_array_reductions = true;
1568 break;
1570 case OMP_CLAUSE_TASK_REDUCTION:
1571 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1572 scan_array_reductions = true;
1573 break;
1575 case OMP_CLAUSE_SHARED:
1576 /* Ignore shared directives in teams construct inside of
1577 target construct. */
1578 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1579 && !is_host_teams_ctx (ctx))
1580 break;
1581 decl = OMP_CLAUSE_DECL (c);
1582 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1583 break;
1584 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1586 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1587 ctx->outer)))
1588 break;
1589 bool by_ref = use_pointer_for_field (decl, ctx);
1590 install_var_field (decl, by_ref, 11, ctx);
1591 break;
1593 fixup_remapped_decl (decl, ctx, false);
1594 break;
1596 case OMP_CLAUSE_MAP:
1597 if (!is_gimple_omp_offloaded (ctx->stmt))
1598 break;
1599 decl = OMP_CLAUSE_DECL (c);
1600 if (DECL_P (decl)
1601 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1602 && (OMP_CLAUSE_MAP_KIND (c)
1603 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1604 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1605 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1606 && varpool_node::get_create (decl)->offloadable)
1607 break;
1608 if (DECL_P (decl))
1610 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1611 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1612 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1613 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1615 tree new_decl = lookup_decl (decl, ctx);
1616 TREE_TYPE (new_decl)
1617 = remap_type (TREE_TYPE (decl), &ctx->cb);
1619 else if (DECL_SIZE (decl)
1620 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1622 tree decl2 = DECL_VALUE_EXPR (decl);
1623 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1624 decl2 = TREE_OPERAND (decl2, 0);
1625 gcc_assert (DECL_P (decl2));
1626 fixup_remapped_decl (decl2, ctx, false);
1627 fixup_remapped_decl (decl, ctx, true);
1629 else
1630 fixup_remapped_decl (decl, ctx, false);
1632 break;
1634 case OMP_CLAUSE_COPYPRIVATE:
1635 case OMP_CLAUSE_COPYIN:
1636 case OMP_CLAUSE_DEFAULT:
1637 case OMP_CLAUSE_IF:
1638 case OMP_CLAUSE_NUM_THREADS:
1639 case OMP_CLAUSE_NUM_TEAMS:
1640 case OMP_CLAUSE_THREAD_LIMIT:
1641 case OMP_CLAUSE_DEVICE:
1642 case OMP_CLAUSE_SCHEDULE:
1643 case OMP_CLAUSE_DIST_SCHEDULE:
1644 case OMP_CLAUSE_NOWAIT:
1645 case OMP_CLAUSE_ORDERED:
1646 case OMP_CLAUSE_COLLAPSE:
1647 case OMP_CLAUSE_UNTIED:
1648 case OMP_CLAUSE_FINAL:
1649 case OMP_CLAUSE_MERGEABLE:
1650 case OMP_CLAUSE_PROC_BIND:
1651 case OMP_CLAUSE_SAFELEN:
1652 case OMP_CLAUSE_SIMDLEN:
1653 case OMP_CLAUSE_ALIGNED:
1654 case OMP_CLAUSE_DEPEND:
1655 case OMP_CLAUSE__LOOPTEMP_:
1656 case OMP_CLAUSE__REDUCTEMP_:
1657 case OMP_CLAUSE_TO:
1658 case OMP_CLAUSE_FROM:
1659 case OMP_CLAUSE_PRIORITY:
1660 case OMP_CLAUSE_GRAINSIZE:
1661 case OMP_CLAUSE_NUM_TASKS:
1662 case OMP_CLAUSE_THREADS:
1663 case OMP_CLAUSE_SIMD:
1664 case OMP_CLAUSE_NOGROUP:
1665 case OMP_CLAUSE_DEFAULTMAP:
1666 case OMP_CLAUSE_ORDER:
1667 case OMP_CLAUSE_BIND:
1668 case OMP_CLAUSE_USE_DEVICE_PTR:
1669 case OMP_CLAUSE_USE_DEVICE_ADDR:
1670 case OMP_CLAUSE_NONTEMPORAL:
1671 case OMP_CLAUSE_ASYNC:
1672 case OMP_CLAUSE_WAIT:
1673 case OMP_CLAUSE_NUM_GANGS:
1674 case OMP_CLAUSE_NUM_WORKERS:
1675 case OMP_CLAUSE_VECTOR_LENGTH:
1676 case OMP_CLAUSE_GANG:
1677 case OMP_CLAUSE_WORKER:
1678 case OMP_CLAUSE_VECTOR:
1679 case OMP_CLAUSE_INDEPENDENT:
1680 case OMP_CLAUSE_AUTO:
1681 case OMP_CLAUSE_SEQ:
1682 case OMP_CLAUSE_TILE:
1683 case OMP_CLAUSE__SIMT_:
1684 case OMP_CLAUSE_IF_PRESENT:
1685 case OMP_CLAUSE_FINALIZE:
1686 case OMP_CLAUSE__CONDTEMP_:
1687 break;
1689 case OMP_CLAUSE__CACHE_:
1690 default:
1691 gcc_unreachable ();
1695 gcc_checking_assert (!scan_array_reductions
1696 || !is_gimple_omp_oacc (ctx->stmt));
1697 if (scan_array_reductions)
1699 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1700 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1701 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1702 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1703 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1705 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1706 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1708 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1709 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1710 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1711 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1712 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1713 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1717 /* Create a new name for omp child function. Returns an identifier. */
1719 static tree
1720 create_omp_child_function_name (bool task_copy)
1722 return clone_function_name_numbered (current_function_decl,
1723 task_copy ? "_omp_cpyfn" : "_omp_fn");
1726 /* Return true if CTX may belong to offloaded code: either if current function
1727 is offloaded, or any enclosing context corresponds to a target region. */
1729 static bool
1730 omp_maybe_offloaded_ctx (omp_context *ctx)
1732 if (cgraph_node::get (current_function_decl)->offloadable)
1733 return true;
1734 for (; ctx; ctx = ctx->outer)
1735 if (is_gimple_omp_offloaded (ctx->stmt))
1736 return true;
1737 return false;
1740 /* Build a decl for the omp child function. It'll not contain a body
1741 yet, just the bare decl. */
1743 static void
1744 create_omp_child_function (omp_context *ctx, bool task_copy)
1746 tree decl, type, name, t;
1748 name = create_omp_child_function_name (task_copy);
1749 if (task_copy)
1750 type = build_function_type_list (void_type_node, ptr_type_node,
1751 ptr_type_node, NULL_TREE);
1752 else
1753 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1755 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1757 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1758 || !task_copy);
1759 if (!task_copy)
1760 ctx->cb.dst_fn = decl;
1761 else
1762 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1764 TREE_STATIC (decl) = 1;
1765 TREE_USED (decl) = 1;
1766 DECL_ARTIFICIAL (decl) = 1;
1767 DECL_IGNORED_P (decl) = 0;
1768 TREE_PUBLIC (decl) = 0;
1769 DECL_UNINLINABLE (decl) = 1;
1770 DECL_EXTERNAL (decl) = 0;
1771 DECL_CONTEXT (decl) = NULL_TREE;
1772 DECL_INITIAL (decl) = make_node (BLOCK);
1773 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1774 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1775 /* Remove omp declare simd attribute from the new attributes. */
1776 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1778 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1779 a = a2;
1780 a = TREE_CHAIN (a);
1781 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1782 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1783 *p = TREE_CHAIN (*p);
1784 else
1786 tree chain = TREE_CHAIN (*p);
1787 *p = copy_node (*p);
1788 p = &TREE_CHAIN (*p);
1789 *p = chain;
1792 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1793 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1794 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1795 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1796 DECL_FUNCTION_VERSIONED (decl)
1797 = DECL_FUNCTION_VERSIONED (current_function_decl);
1799 if (omp_maybe_offloaded_ctx (ctx))
1801 cgraph_node::get_create (decl)->offloadable = 1;
1802 if (ENABLE_OFFLOADING)
1803 g->have_offload = true;
1806 if (cgraph_node::get_create (decl)->offloadable
1807 && !lookup_attribute ("omp declare target",
1808 DECL_ATTRIBUTES (current_function_decl)))
1810 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1811 ? "omp target entrypoint"
1812 : "omp declare target");
1813 DECL_ATTRIBUTES (decl)
1814 = tree_cons (get_identifier (target_attr),
1815 NULL_TREE, DECL_ATTRIBUTES (decl));
1818 t = build_decl (DECL_SOURCE_LOCATION (decl),
1819 RESULT_DECL, NULL_TREE, void_type_node);
1820 DECL_ARTIFICIAL (t) = 1;
1821 DECL_IGNORED_P (t) = 1;
1822 DECL_CONTEXT (t) = decl;
1823 DECL_RESULT (decl) = t;
1825 tree data_name = get_identifier (".omp_data_i");
1826 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1827 ptr_type_node);
1828 DECL_ARTIFICIAL (t) = 1;
1829 DECL_NAMELESS (t) = 1;
1830 DECL_ARG_TYPE (t) = ptr_type_node;
1831 DECL_CONTEXT (t) = current_function_decl;
1832 TREE_USED (t) = 1;
1833 TREE_READONLY (t) = 1;
1834 DECL_ARGUMENTS (decl) = t;
1835 if (!task_copy)
1836 ctx->receiver_decl = t;
1837 else
1839 t = build_decl (DECL_SOURCE_LOCATION (decl),
1840 PARM_DECL, get_identifier (".omp_data_o"),
1841 ptr_type_node);
1842 DECL_ARTIFICIAL (t) = 1;
1843 DECL_NAMELESS (t) = 1;
1844 DECL_ARG_TYPE (t) = ptr_type_node;
1845 DECL_CONTEXT (t) = current_function_decl;
1846 TREE_USED (t) = 1;
1847 TREE_ADDRESSABLE (t) = 1;
1848 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1849 DECL_ARGUMENTS (decl) = t;
1852 /* Allocate memory for the function structure. The call to
1853 allocate_struct_function clobbers CFUN, so we need to restore
1854 it afterward. */
1855 push_struct_function (decl);
1856 cfun->function_end_locus = gimple_location (ctx->stmt);
1857 init_tree_ssa (cfun);
1858 pop_cfun ();
1861 /* Callback for walk_gimple_seq. Check if combined parallel
1862 contains gimple_omp_for_combined_into_p OMP_FOR. */
1864 tree
1865 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1866 bool *handled_ops_p,
1867 struct walk_stmt_info *wi)
1869 gimple *stmt = gsi_stmt (*gsi_p);
1871 *handled_ops_p = true;
1872 switch (gimple_code (stmt))
1874 WALK_SUBSTMTS;
1876 case GIMPLE_OMP_FOR:
1877 if (gimple_omp_for_combined_into_p (stmt)
1878 && gimple_omp_for_kind (stmt)
1879 == *(const enum gf_mask *) (wi->info))
1881 wi->info = stmt;
1882 return integer_zero_node;
1884 break;
1885 default:
1886 break;
1888 return NULL;
1891 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1893 static void
1894 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1895 omp_context *outer_ctx)
1897 struct walk_stmt_info wi;
1899 memset (&wi, 0, sizeof (wi));
1900 wi.val_only = true;
1901 wi.info = (void *) &msk;
1902 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1903 if (wi.info != (void *) &msk)
1905 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1906 struct omp_for_data fd;
1907 omp_extract_for_data (for_stmt, &fd, NULL);
1908 /* We need two temporaries with fd.loop.v type (istart/iend)
1909 and then (fd.collapse - 1) temporaries with the same
1910 type for count2 ... countN-1 vars if not constant. */
1911 size_t count = 2, i;
1912 tree type = fd.iter_type;
1913 if (fd.collapse > 1
1914 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1916 count += fd.collapse - 1;
1917 /* If there are lastprivate clauses on the inner
1918 GIMPLE_OMP_FOR, add one more temporaries for the total number
1919 of iterations (product of count1 ... countN-1). */
1920 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1921 OMP_CLAUSE_LASTPRIVATE))
1922 count++;
1923 else if (msk == GF_OMP_FOR_KIND_FOR
1924 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1925 OMP_CLAUSE_LASTPRIVATE))
1926 count++;
1928 for (i = 0; i < count; i++)
1930 tree temp = create_tmp_var (type);
1931 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1932 insert_decl_map (&outer_ctx->cb, temp, temp);
1933 OMP_CLAUSE_DECL (c) = temp;
1934 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1935 gimple_omp_taskreg_set_clauses (stmt, c);
1938 if (msk == GF_OMP_FOR_KIND_TASKLOOP
1939 && omp_find_clause (gimple_omp_task_clauses (stmt),
1940 OMP_CLAUSE_REDUCTION))
1942 tree type = build_pointer_type (pointer_sized_int_node);
1943 tree temp = create_tmp_var (type);
1944 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1945 insert_decl_map (&outer_ctx->cb, temp, temp);
1946 OMP_CLAUSE_DECL (c) = temp;
1947 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
1948 gimple_omp_task_set_clauses (stmt, c);
1952 /* Scan an OpenMP parallel directive. */
1954 static void
1955 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1957 omp_context *ctx;
1958 tree name;
1959 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1961 /* Ignore parallel directives with empty bodies, unless there
1962 are copyin clauses. */
1963 if (optimize > 0
1964 && empty_body_p (gimple_omp_body (stmt))
1965 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1966 OMP_CLAUSE_COPYIN) == NULL)
1968 gsi_replace (gsi, gimple_build_nop (), false);
1969 return;
1972 if (gimple_omp_parallel_combined_p (stmt))
1973 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1974 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
1975 OMP_CLAUSE_REDUCTION);
1976 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
1977 if (OMP_CLAUSE_REDUCTION_TASK (c))
1979 tree type = build_pointer_type (pointer_sized_int_node);
1980 tree temp = create_tmp_var (type);
1981 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1982 if (outer_ctx)
1983 insert_decl_map (&outer_ctx->cb, temp, temp);
1984 OMP_CLAUSE_DECL (c) = temp;
1985 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1986 gimple_omp_parallel_set_clauses (stmt, c);
1987 break;
1989 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
1990 break;
1992 ctx = new_omp_context (stmt, outer_ctx);
1993 taskreg_contexts.safe_push (ctx);
1994 if (taskreg_nesting_level > 1)
1995 ctx->is_nested = true;
1996 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1997 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1998 name = create_tmp_var_name (".omp_data_s");
1999 name = build_decl (gimple_location (stmt),
2000 TYPE_DECL, name, ctx->record_type);
2001 DECL_ARTIFICIAL (name) = 1;
2002 DECL_NAMELESS (name) = 1;
2003 TYPE_NAME (ctx->record_type) = name;
2004 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2005 create_omp_child_function (ctx, false);
2006 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2008 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2009 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2011 if (TYPE_FIELDS (ctx->record_type) == NULL)
2012 ctx->record_type = ctx->receiver_decl = NULL;
2015 /* Scan an OpenMP task directive. */
2017 static void
2018 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2020 omp_context *ctx;
2021 tree name, t;
2022 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2024 /* Ignore task directives with empty bodies, unless they have depend
2025 clause. */
2026 if (optimize > 0
2027 && gimple_omp_body (stmt)
2028 && empty_body_p (gimple_omp_body (stmt))
2029 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2031 gsi_replace (gsi, gimple_build_nop (), false);
2032 return;
2035 if (gimple_omp_task_taskloop_p (stmt))
2036 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2038 ctx = new_omp_context (stmt, outer_ctx);
2040 if (gimple_omp_task_taskwait_p (stmt))
2042 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2043 return;
2046 taskreg_contexts.safe_push (ctx);
2047 if (taskreg_nesting_level > 1)
2048 ctx->is_nested = true;
2049 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2050 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2051 name = create_tmp_var_name (".omp_data_s");
2052 name = build_decl (gimple_location (stmt),
2053 TYPE_DECL, name, ctx->record_type);
2054 DECL_ARTIFICIAL (name) = 1;
2055 DECL_NAMELESS (name) = 1;
2056 TYPE_NAME (ctx->record_type) = name;
2057 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2058 create_omp_child_function (ctx, false);
2059 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2061 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2063 if (ctx->srecord_type)
2065 name = create_tmp_var_name (".omp_data_a");
2066 name = build_decl (gimple_location (stmt),
2067 TYPE_DECL, name, ctx->srecord_type);
2068 DECL_ARTIFICIAL (name) = 1;
2069 DECL_NAMELESS (name) = 1;
2070 TYPE_NAME (ctx->srecord_type) = name;
2071 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2072 create_omp_child_function (ctx, true);
2075 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2077 if (TYPE_FIELDS (ctx->record_type) == NULL)
2079 ctx->record_type = ctx->receiver_decl = NULL;
2080 t = build_int_cst (long_integer_type_node, 0);
2081 gimple_omp_task_set_arg_size (stmt, t);
2082 t = build_int_cst (long_integer_type_node, 1);
2083 gimple_omp_task_set_arg_align (stmt, t);
2087 /* Helper function for finish_taskreg_scan, called through walk_tree.
2088 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2089 tree, replace it in the expression. */
2091 static tree
2092 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2094 if (VAR_P (*tp))
2096 omp_context *ctx = (omp_context *) data;
2097 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2098 if (t != *tp)
2100 if (DECL_HAS_VALUE_EXPR_P (t))
2101 t = unshare_expr (DECL_VALUE_EXPR (t));
2102 *tp = t;
2104 *walk_subtrees = 0;
2106 else if (IS_TYPE_OR_DECL_P (*tp))
2107 *walk_subtrees = 0;
2108 return NULL_TREE;
2111 /* If any decls have been made addressable during scan_omp,
2112 adjust their fields if needed, and layout record types
2113 of parallel/task constructs. */
2115 static void
2116 finish_taskreg_scan (omp_context *ctx)
2118 if (ctx->record_type == NULL_TREE)
2119 return;
2121 /* If any task_shared_vars were needed, verify all
2122 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2123 statements if use_pointer_for_field hasn't changed
2124 because of that. If it did, update field types now. */
2125 if (task_shared_vars)
2127 tree c;
2129 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2130 c; c = OMP_CLAUSE_CHAIN (c))
2131 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2132 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2134 tree decl = OMP_CLAUSE_DECL (c);
2136 /* Global variables don't need to be copied,
2137 the receiver side will use them directly. */
2138 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2139 continue;
2140 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2141 || !use_pointer_for_field (decl, ctx))
2142 continue;
2143 tree field = lookup_field (decl, ctx);
2144 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2145 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2146 continue;
2147 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2148 TREE_THIS_VOLATILE (field) = 0;
2149 DECL_USER_ALIGN (field) = 0;
2150 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2151 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2152 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2153 if (ctx->srecord_type)
2155 tree sfield = lookup_sfield (decl, ctx);
2156 TREE_TYPE (sfield) = TREE_TYPE (field);
2157 TREE_THIS_VOLATILE (sfield) = 0;
2158 DECL_USER_ALIGN (sfield) = 0;
2159 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2160 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2161 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2166 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2168 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2169 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2170 if (c)
2172 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2173 expects to find it at the start of data. */
2174 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2175 tree *p = &TYPE_FIELDS (ctx->record_type);
2176 while (*p)
2177 if (*p == f)
2179 *p = DECL_CHAIN (*p);
2180 break;
2182 else
2183 p = &DECL_CHAIN (*p);
2184 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2185 TYPE_FIELDS (ctx->record_type) = f;
2187 layout_type (ctx->record_type);
2188 fixup_child_record_type (ctx);
2190 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2192 layout_type (ctx->record_type);
2193 fixup_child_record_type (ctx);
2195 else
2197 location_t loc = gimple_location (ctx->stmt);
2198 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2199 /* Move VLA fields to the end. */
2200 p = &TYPE_FIELDS (ctx->record_type);
2201 while (*p)
2202 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2203 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2205 *q = *p;
2206 *p = TREE_CHAIN (*p);
2207 TREE_CHAIN (*q) = NULL_TREE;
2208 q = &TREE_CHAIN (*q);
2210 else
2211 p = &DECL_CHAIN (*p);
2212 *p = vla_fields;
2213 if (gimple_omp_task_taskloop_p (ctx->stmt))
2215 /* Move fields corresponding to first and second _looptemp_
2216 clause first. There are filled by GOMP_taskloop
2217 and thus need to be in specific positions. */
2218 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2219 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2220 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2221 OMP_CLAUSE__LOOPTEMP_);
2222 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2223 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2224 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2225 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2226 p = &TYPE_FIELDS (ctx->record_type);
2227 while (*p)
2228 if (*p == f1 || *p == f2 || *p == f3)
2229 *p = DECL_CHAIN (*p);
2230 else
2231 p = &DECL_CHAIN (*p);
2232 DECL_CHAIN (f1) = f2;
2233 if (c3)
2235 DECL_CHAIN (f2) = f3;
2236 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2238 else
2239 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2240 TYPE_FIELDS (ctx->record_type) = f1;
2241 if (ctx->srecord_type)
2243 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2244 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2245 if (c3)
2246 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2247 p = &TYPE_FIELDS (ctx->srecord_type);
2248 while (*p)
2249 if (*p == f1 || *p == f2 || *p == f3)
2250 *p = DECL_CHAIN (*p);
2251 else
2252 p = &DECL_CHAIN (*p);
2253 DECL_CHAIN (f1) = f2;
2254 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2255 if (c3)
2257 DECL_CHAIN (f2) = f3;
2258 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2260 else
2261 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2262 TYPE_FIELDS (ctx->srecord_type) = f1;
2265 layout_type (ctx->record_type);
2266 fixup_child_record_type (ctx);
2267 if (ctx->srecord_type)
2268 layout_type (ctx->srecord_type);
2269 tree t = fold_convert_loc (loc, long_integer_type_node,
2270 TYPE_SIZE_UNIT (ctx->record_type));
2271 if (TREE_CODE (t) != INTEGER_CST)
2273 t = unshare_expr (t);
2274 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2276 gimple_omp_task_set_arg_size (ctx->stmt, t);
2277 t = build_int_cst (long_integer_type_node,
2278 TYPE_ALIGN_UNIT (ctx->record_type));
2279 gimple_omp_task_set_arg_align (ctx->stmt, t);
2283 /* Find the enclosing offload context. */
2285 static omp_context *
2286 enclosing_target_ctx (omp_context *ctx)
2288 for (; ctx; ctx = ctx->outer)
2289 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2290 break;
2292 return ctx;
2295 /* Return true if ctx is part of an oacc kernels region. */
2297 static bool
2298 ctx_in_oacc_kernels_region (omp_context *ctx)
2300 for (;ctx != NULL; ctx = ctx->outer)
2302 gimple *stmt = ctx->stmt;
2303 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2304 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2305 return true;
2308 return false;
2311 /* Check the parallelism clauses inside a kernels regions.
2312 Until kernels handling moves to use the same loop indirection
2313 scheme as parallel, we need to do this checking early. */
2315 static unsigned
2316 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2318 bool checking = true;
2319 unsigned outer_mask = 0;
2320 unsigned this_mask = 0;
2321 bool has_seq = false, has_auto = false;
2323 if (ctx->outer)
2324 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2325 if (!stmt)
2327 checking = false;
2328 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2329 return outer_mask;
2330 stmt = as_a <gomp_for *> (ctx->stmt);
2333 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2335 switch (OMP_CLAUSE_CODE (c))
2337 case OMP_CLAUSE_GANG:
2338 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2339 break;
2340 case OMP_CLAUSE_WORKER:
2341 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2342 break;
2343 case OMP_CLAUSE_VECTOR:
2344 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2345 break;
2346 case OMP_CLAUSE_SEQ:
2347 has_seq = true;
2348 break;
2349 case OMP_CLAUSE_AUTO:
2350 has_auto = true;
2351 break;
2352 default:
2353 break;
2357 if (checking)
2359 if (has_seq && (this_mask || has_auto))
2360 error_at (gimple_location (stmt), "%<seq%> overrides other"
2361 " OpenACC loop specifiers");
2362 else if (has_auto && this_mask)
2363 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2364 " OpenACC loop specifiers");
2366 if (this_mask & outer_mask)
2367 error_at (gimple_location (stmt), "inner loop uses same"
2368 " OpenACC parallelism as containing loop");
2371 return outer_mask | this_mask;
2374 /* Scan a GIMPLE_OMP_FOR. */
2376 static omp_context *
2377 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2379 omp_context *ctx;
2380 size_t i;
2381 tree clauses = gimple_omp_for_clauses (stmt);
2383 ctx = new_omp_context (stmt, outer_ctx);
2385 if (is_gimple_omp_oacc (stmt))
2387 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2389 if (!tgt || is_oacc_parallel_or_serial (tgt))
2390 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2392 char const *check = NULL;
2394 switch (OMP_CLAUSE_CODE (c))
2396 case OMP_CLAUSE_GANG:
2397 check = "gang";
2398 break;
2400 case OMP_CLAUSE_WORKER:
2401 check = "worker";
2402 break;
2404 case OMP_CLAUSE_VECTOR:
2405 check = "vector";
2406 break;
2408 default:
2409 break;
2412 if (check && OMP_CLAUSE_OPERAND (c, 0))
2413 error_at (gimple_location (stmt),
2414 "argument not permitted on %qs clause in"
2415 " OpenACC %<parallel%> or %<serial%>", check);
2418 if (tgt && is_oacc_kernels (tgt))
2420 /* Strip out reductions, as they are not handled yet. */
2421 tree *prev_ptr = &clauses;
2423 while (tree probe = *prev_ptr)
2425 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2427 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2428 *prev_ptr = *next_ptr;
2429 else
2430 prev_ptr = next_ptr;
2433 gimple_omp_for_set_clauses (stmt, clauses);
2434 check_oacc_kernel_gwv (stmt, ctx);
2437 /* Collect all variables named in reductions on this loop. Ensure
2438 that, if this loop has a reduction on some variable v, and there is
2439 a reduction on v somewhere in an outer context, then there is a
2440 reduction on v on all intervening loops as well. */
2441 tree local_reduction_clauses = NULL;
2442 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2444 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2445 local_reduction_clauses
2446 = tree_cons (NULL, c, local_reduction_clauses);
2448 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2449 ctx->outer_reduction_clauses
2450 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2451 ctx->outer->outer_reduction_clauses);
2452 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2453 tree local_iter = local_reduction_clauses;
2454 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2456 tree local_clause = TREE_VALUE (local_iter);
2457 tree local_var = OMP_CLAUSE_DECL (local_clause);
2458 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2459 bool have_outer_reduction = false;
2460 tree ctx_iter = outer_reduction_clauses;
2461 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2463 tree outer_clause = TREE_VALUE (ctx_iter);
2464 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2465 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2466 if (outer_var == local_var && outer_op != local_op)
2468 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2469 "conflicting reduction operations for %qE",
2470 local_var);
2471 inform (OMP_CLAUSE_LOCATION (outer_clause),
2472 "location of the previous reduction for %qE",
2473 outer_var);
2475 if (outer_var == local_var)
2477 have_outer_reduction = true;
2478 break;
2481 if (have_outer_reduction)
2483 /* There is a reduction on outer_var both on this loop and on
2484 some enclosing loop. Walk up the context tree until such a
2485 loop with a reduction on outer_var is found, and complain
2486 about all intervening loops that do not have such a
2487 reduction. */
2488 struct omp_context *curr_loop = ctx->outer;
2489 bool found = false;
2490 while (curr_loop != NULL)
2492 tree curr_iter = curr_loop->local_reduction_clauses;
2493 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2495 tree curr_clause = TREE_VALUE (curr_iter);
2496 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2497 if (curr_var == local_var)
2499 found = true;
2500 break;
2503 if (!found)
2504 warning_at (gimple_location (curr_loop->stmt), 0,
2505 "nested loop in reduction needs "
2506 "reduction clause for %qE",
2507 local_var);
2508 else
2509 break;
2510 curr_loop = curr_loop->outer;
2514 ctx->local_reduction_clauses = local_reduction_clauses;
2515 ctx->outer_reduction_clauses
2516 = chainon (unshare_expr (ctx->local_reduction_clauses),
2517 ctx->outer_reduction_clauses);
2520 scan_sharing_clauses (clauses, ctx);
2522 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2523 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2525 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2526 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2527 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2528 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2530 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2531 return ctx;
2534 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2536 static void
2537 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2538 omp_context *outer_ctx)
2540 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2541 gsi_replace (gsi, bind, false);
2542 gimple_seq seq = NULL;
2543 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2544 tree cond = create_tmp_var_raw (integer_type_node);
2545 DECL_CONTEXT (cond) = current_function_decl;
2546 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2547 gimple_bind_set_vars (bind, cond);
2548 gimple_call_set_lhs (g, cond);
2549 gimple_seq_add_stmt (&seq, g);
2550 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2551 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2552 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2553 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2554 gimple_seq_add_stmt (&seq, g);
2555 g = gimple_build_label (lab1);
2556 gimple_seq_add_stmt (&seq, g);
2557 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2558 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2559 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2560 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2561 gimple_omp_for_set_clauses (new_stmt, clause);
2562 gimple_seq_add_stmt (&seq, new_stmt);
2563 g = gimple_build_goto (lab3);
2564 gimple_seq_add_stmt (&seq, g);
2565 g = gimple_build_label (lab2);
2566 gimple_seq_add_stmt (&seq, g);
2567 gimple_seq_add_stmt (&seq, stmt);
2568 g = gimple_build_label (lab3);
2569 gimple_seq_add_stmt (&seq, g);
2570 gimple_bind_set_body (bind, seq);
2571 update_stmt (bind);
2572 scan_omp_for (new_stmt, outer_ctx);
2573 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2576 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2577 struct walk_stmt_info *);
2578 static omp_context *maybe_lookup_ctx (gimple *);
2580 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2581 for scan phase loop. */
2583 static void
2584 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2585 omp_context *outer_ctx)
2587 /* The only change between inclusive and exclusive scan will be
2588 within the first simd loop, so just use inclusive in the
2589 worksharing loop. */
2590 outer_ctx->scan_inclusive = true;
2591 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2592 OMP_CLAUSE_DECL (c) = integer_zero_node;
2594 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2595 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2596 gsi_replace (gsi, input_stmt, false);
2597 gimple_seq input_body = NULL;
2598 gimple_seq_add_stmt (&input_body, stmt);
2599 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2601 gimple_stmt_iterator input1_gsi = gsi_none ();
2602 struct walk_stmt_info wi;
2603 memset (&wi, 0, sizeof (wi));
2604 wi.val_only = true;
2605 wi.info = (void *) &input1_gsi;
2606 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2607 gcc_assert (!gsi_end_p (input1_gsi));
2609 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2610 gsi_next (&input1_gsi);
2611 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2612 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2613 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2614 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2615 std::swap (input_stmt1, scan_stmt1);
2617 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2618 gimple_omp_set_body (input_stmt1, NULL);
2620 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2621 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2623 gimple_omp_set_body (input_stmt1, input_body1);
2624 gimple_omp_set_body (scan_stmt1, NULL);
2626 gimple_stmt_iterator input2_gsi = gsi_none ();
2627 memset (&wi, 0, sizeof (wi));
2628 wi.val_only = true;
2629 wi.info = (void *) &input2_gsi;
2630 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2631 NULL, &wi);
2632 gcc_assert (!gsi_end_p (input2_gsi));
2634 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2635 gsi_next (&input2_gsi);
2636 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2637 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2638 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2639 std::swap (input_stmt2, scan_stmt2);
2641 gimple_omp_set_body (input_stmt2, NULL);
2643 gimple_omp_set_body (input_stmt, input_body);
2644 gimple_omp_set_body (scan_stmt, scan_body);
2646 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2647 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2649 ctx = new_omp_context (scan_stmt, outer_ctx);
2650 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2652 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
2655 /* Scan an OpenMP sections directive. */
2657 static void
2658 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2660 omp_context *ctx;
2662 ctx = new_omp_context (stmt, outer_ctx);
2663 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2664 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2667 /* Scan an OpenMP single directive. */
2669 static void
2670 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2672 omp_context *ctx;
2673 tree name;
2675 ctx = new_omp_context (stmt, outer_ctx);
2676 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2677 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2678 name = create_tmp_var_name (".omp_copy_s");
2679 name = build_decl (gimple_location (stmt),
2680 TYPE_DECL, name, ctx->record_type);
2681 TYPE_NAME (ctx->record_type) = name;
2683 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2684 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2686 if (TYPE_FIELDS (ctx->record_type) == NULL)
2687 ctx->record_type = NULL;
2688 else
2689 layout_type (ctx->record_type);
2692 /* Scan a GIMPLE_OMP_TARGET. */
2694 static void
2695 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2697 omp_context *ctx;
2698 tree name;
2699 bool offloaded = is_gimple_omp_offloaded (stmt);
2700 tree clauses = gimple_omp_target_clauses (stmt);
2702 ctx = new_omp_context (stmt, outer_ctx);
2703 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2704 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2705 name = create_tmp_var_name (".omp_data_t");
2706 name = build_decl (gimple_location (stmt),
2707 TYPE_DECL, name, ctx->record_type);
2708 DECL_ARTIFICIAL (name) = 1;
2709 DECL_NAMELESS (name) = 1;
2710 TYPE_NAME (ctx->record_type) = name;
2711 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2713 if (offloaded)
2715 create_omp_child_function (ctx, false);
2716 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2719 scan_sharing_clauses (clauses, ctx);
2720 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2722 if (TYPE_FIELDS (ctx->record_type) == NULL)
2723 ctx->record_type = ctx->receiver_decl = NULL;
2724 else
2726 TYPE_FIELDS (ctx->record_type)
2727 = nreverse (TYPE_FIELDS (ctx->record_type));
2728 if (flag_checking)
2730 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2731 for (tree field = TYPE_FIELDS (ctx->record_type);
2732 field;
2733 field = DECL_CHAIN (field))
2734 gcc_assert (DECL_ALIGN (field) == align);
2736 layout_type (ctx->record_type);
2737 if (offloaded)
2738 fixup_child_record_type (ctx);
2742 /* Scan an OpenMP teams directive. */
2744 static void
2745 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2747 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2749 if (!gimple_omp_teams_host (stmt))
2751 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2752 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2753 return;
2755 taskreg_contexts.safe_push (ctx);
2756 gcc_assert (taskreg_nesting_level == 1);
2757 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2758 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2759 tree name = create_tmp_var_name (".omp_data_s");
2760 name = build_decl (gimple_location (stmt),
2761 TYPE_DECL, name, ctx->record_type);
2762 DECL_ARTIFICIAL (name) = 1;
2763 DECL_NAMELESS (name) = 1;
2764 TYPE_NAME (ctx->record_type) = name;
2765 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2766 create_omp_child_function (ctx, false);
2767 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2769 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2770 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2772 if (TYPE_FIELDS (ctx->record_type) == NULL)
2773 ctx->record_type = ctx->receiver_decl = NULL;
2776 /* Check nesting restrictions. */
2777 static bool
2778 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2780 tree c;
2782 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2783 inside an OpenACC CTX. */
2784 if (!(is_gimple_omp (stmt)
2785 && is_gimple_omp_oacc (stmt))
2786 /* Except for atomic codes that we share with OpenMP. */
2787 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2788 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2790 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2792 error_at (gimple_location (stmt),
2793 "non-OpenACC construct inside of OpenACC routine");
2794 return false;
2796 else
2797 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2798 if (is_gimple_omp (octx->stmt)
2799 && is_gimple_omp_oacc (octx->stmt))
2801 error_at (gimple_location (stmt),
2802 "non-OpenACC construct inside of OpenACC region");
2803 return false;
2807 if (ctx != NULL)
2809 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2810 && ctx->outer
2811 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2812 ctx = ctx->outer;
2813 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2814 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
2815 && !ctx->loop_p)
2817 c = NULL_TREE;
2818 if (ctx->order_concurrent
2819 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
2820 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2821 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2823 error_at (gimple_location (stmt),
2824 "OpenMP constructs other than %<parallel%>, %<loop%>"
2825 " or %<simd%> may not be nested inside a region with"
2826 " the %<order(concurrent)%> clause");
2827 return false;
2829 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2831 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2832 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2834 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2835 && (ctx->outer == NULL
2836 || !gimple_omp_for_combined_into_p (ctx->stmt)
2837 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2838 || (gimple_omp_for_kind (ctx->outer->stmt)
2839 != GF_OMP_FOR_KIND_FOR)
2840 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2842 error_at (gimple_location (stmt),
2843 "%<ordered simd threads%> must be closely "
2844 "nested inside of %<for simd%> region");
2845 return false;
2847 return true;
2850 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2851 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2852 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2853 return true;
2854 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
2855 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
2856 return true;
2857 error_at (gimple_location (stmt),
2858 "OpenMP constructs other than "
2859 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2860 "not be nested inside %<simd%> region");
2861 return false;
2863 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2865 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2866 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
2867 && omp_find_clause (gimple_omp_for_clauses (stmt),
2868 OMP_CLAUSE_BIND) == NULL_TREE))
2869 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2871 error_at (gimple_location (stmt),
2872 "only %<distribute%>, %<parallel%> or %<loop%> "
2873 "regions are allowed to be strictly nested inside "
2874 "%<teams%> region");
2875 return false;
2878 else if (ctx->order_concurrent
2879 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
2880 && (gimple_code (stmt) != GIMPLE_OMP_FOR
2881 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
2882 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
2884 if (ctx->loop_p)
2885 error_at (gimple_location (stmt),
2886 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2887 "%<simd%> may not be nested inside a %<loop%> region");
2888 else
2889 error_at (gimple_location (stmt),
2890 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2891 "%<simd%> may not be nested inside a region with "
2892 "the %<order(concurrent)%> clause");
2893 return false;
2896 switch (gimple_code (stmt))
2898 case GIMPLE_OMP_FOR:
2899 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
2900 return true;
2901 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2903 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2905 error_at (gimple_location (stmt),
2906 "%<distribute%> region must be strictly nested "
2907 "inside %<teams%> construct");
2908 return false;
2910 return true;
2912 /* We split taskloop into task and nested taskloop in it. */
2913 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2914 return true;
2915 /* For now, hope this will change and loop bind(parallel) will not
2916 be allowed in lots of contexts. */
2917 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
2918 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
2919 return true;
2920 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2922 bool ok = false;
2924 if (ctx)
2925 switch (gimple_code (ctx->stmt))
2927 case GIMPLE_OMP_FOR:
2928 ok = (gimple_omp_for_kind (ctx->stmt)
2929 == GF_OMP_FOR_KIND_OACC_LOOP);
2930 break;
2932 case GIMPLE_OMP_TARGET:
2933 switch (gimple_omp_target_kind (ctx->stmt))
2935 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2936 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2937 case GF_OMP_TARGET_KIND_OACC_SERIAL:
2938 ok = true;
2939 break;
2941 default:
2942 break;
2945 default:
2946 break;
2948 else if (oacc_get_fn_attrib (current_function_decl))
2949 ok = true;
2950 if (!ok)
2952 error_at (gimple_location (stmt),
2953 "OpenACC loop directive must be associated with"
2954 " an OpenACC compute region");
2955 return false;
2958 /* FALLTHRU */
2959 case GIMPLE_CALL:
2960 if (is_gimple_call (stmt)
2961 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2962 == BUILT_IN_GOMP_CANCEL
2963 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2964 == BUILT_IN_GOMP_CANCELLATION_POINT))
2966 const char *bad = NULL;
2967 const char *kind = NULL;
2968 const char *construct
2969 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2970 == BUILT_IN_GOMP_CANCEL)
2971 ? "cancel"
2972 : "cancellation point";
2973 if (ctx == NULL)
2975 error_at (gimple_location (stmt), "orphaned %qs construct",
2976 construct);
2977 return false;
2979 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2980 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2981 : 0)
2983 case 1:
2984 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2985 bad = "parallel";
2986 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2987 == BUILT_IN_GOMP_CANCEL
2988 && !integer_zerop (gimple_call_arg (stmt, 1)))
2989 ctx->cancellable = true;
2990 kind = "parallel";
2991 break;
2992 case 2:
2993 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2994 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2995 bad = "for";
2996 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2997 == BUILT_IN_GOMP_CANCEL
2998 && !integer_zerop (gimple_call_arg (stmt, 1)))
3000 ctx->cancellable = true;
3001 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3002 OMP_CLAUSE_NOWAIT))
3003 warning_at (gimple_location (stmt), 0,
3004 "%<cancel for%> inside "
3005 "%<nowait%> for construct");
3006 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3007 OMP_CLAUSE_ORDERED))
3008 warning_at (gimple_location (stmt), 0,
3009 "%<cancel for%> inside "
3010 "%<ordered%> for construct");
3012 kind = "for";
3013 break;
3014 case 4:
3015 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3016 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3017 bad = "sections";
3018 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3019 == BUILT_IN_GOMP_CANCEL
3020 && !integer_zerop (gimple_call_arg (stmt, 1)))
3022 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3024 ctx->cancellable = true;
3025 if (omp_find_clause (gimple_omp_sections_clauses
3026 (ctx->stmt),
3027 OMP_CLAUSE_NOWAIT))
3028 warning_at (gimple_location (stmt), 0,
3029 "%<cancel sections%> inside "
3030 "%<nowait%> sections construct");
3032 else
3034 gcc_assert (ctx->outer
3035 && gimple_code (ctx->outer->stmt)
3036 == GIMPLE_OMP_SECTIONS);
3037 ctx->outer->cancellable = true;
3038 if (omp_find_clause (gimple_omp_sections_clauses
3039 (ctx->outer->stmt),
3040 OMP_CLAUSE_NOWAIT))
3041 warning_at (gimple_location (stmt), 0,
3042 "%<cancel sections%> inside "
3043 "%<nowait%> sections construct");
3046 kind = "sections";
3047 break;
3048 case 8:
3049 if (!is_task_ctx (ctx)
3050 && (!is_taskloop_ctx (ctx)
3051 || ctx->outer == NULL
3052 || !is_task_ctx (ctx->outer)))
3053 bad = "task";
3054 else
3056 for (omp_context *octx = ctx->outer;
3057 octx; octx = octx->outer)
3059 switch (gimple_code (octx->stmt))
3061 case GIMPLE_OMP_TASKGROUP:
3062 break;
3063 case GIMPLE_OMP_TARGET:
3064 if (gimple_omp_target_kind (octx->stmt)
3065 != GF_OMP_TARGET_KIND_REGION)
3066 continue;
3067 /* FALLTHRU */
3068 case GIMPLE_OMP_PARALLEL:
3069 case GIMPLE_OMP_TEAMS:
3070 error_at (gimple_location (stmt),
3071 "%<%s taskgroup%> construct not closely "
3072 "nested inside of %<taskgroup%> region",
3073 construct);
3074 return false;
3075 case GIMPLE_OMP_TASK:
3076 if (gimple_omp_task_taskloop_p (octx->stmt)
3077 && octx->outer
3078 && is_taskloop_ctx (octx->outer))
3080 tree clauses
3081 = gimple_omp_for_clauses (octx->outer->stmt);
3082 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3083 break;
3085 continue;
3086 default:
3087 continue;
3089 break;
3091 ctx->cancellable = true;
3093 kind = "taskgroup";
3094 break;
3095 default:
3096 error_at (gimple_location (stmt), "invalid arguments");
3097 return false;
3099 if (bad)
3101 error_at (gimple_location (stmt),
3102 "%<%s %s%> construct not closely nested inside of %qs",
3103 construct, kind, bad);
3104 return false;
3107 /* FALLTHRU */
3108 case GIMPLE_OMP_SECTIONS:
3109 case GIMPLE_OMP_SINGLE:
3110 for (; ctx != NULL; ctx = ctx->outer)
3111 switch (gimple_code (ctx->stmt))
3113 case GIMPLE_OMP_FOR:
3114 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3115 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3116 break;
3117 /* FALLTHRU */
3118 case GIMPLE_OMP_SECTIONS:
3119 case GIMPLE_OMP_SINGLE:
3120 case GIMPLE_OMP_ORDERED:
3121 case GIMPLE_OMP_MASTER:
3122 case GIMPLE_OMP_TASK:
3123 case GIMPLE_OMP_CRITICAL:
3124 if (is_gimple_call (stmt))
3126 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3127 != BUILT_IN_GOMP_BARRIER)
3128 return true;
3129 error_at (gimple_location (stmt),
3130 "barrier region may not be closely nested inside "
3131 "of work-sharing, %<loop%>, %<critical%>, "
3132 "%<ordered%>, %<master%>, explicit %<task%> or "
3133 "%<taskloop%> region");
3134 return false;
3136 error_at (gimple_location (stmt),
3137 "work-sharing region may not be closely nested inside "
3138 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3139 "%<master%>, explicit %<task%> or %<taskloop%> region");
3140 return false;
3141 case GIMPLE_OMP_PARALLEL:
3142 case GIMPLE_OMP_TEAMS:
3143 return true;
3144 case GIMPLE_OMP_TARGET:
3145 if (gimple_omp_target_kind (ctx->stmt)
3146 == GF_OMP_TARGET_KIND_REGION)
3147 return true;
3148 break;
3149 default:
3150 break;
3152 break;
3153 case GIMPLE_OMP_MASTER:
3154 for (; ctx != NULL; ctx = ctx->outer)
3155 switch (gimple_code (ctx->stmt))
3157 case GIMPLE_OMP_FOR:
3158 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3159 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3160 break;
3161 /* FALLTHRU */
3162 case GIMPLE_OMP_SECTIONS:
3163 case GIMPLE_OMP_SINGLE:
3164 case GIMPLE_OMP_TASK:
3165 error_at (gimple_location (stmt),
3166 "%<master%> region may not be closely nested inside "
3167 "of work-sharing, %<loop%>, explicit %<task%> or "
3168 "%<taskloop%> region");
3169 return false;
3170 case GIMPLE_OMP_PARALLEL:
3171 case GIMPLE_OMP_TEAMS:
3172 return true;
3173 case GIMPLE_OMP_TARGET:
3174 if (gimple_omp_target_kind (ctx->stmt)
3175 == GF_OMP_TARGET_KIND_REGION)
3176 return true;
3177 break;
3178 default:
3179 break;
3181 break;
3182 case GIMPLE_OMP_TASK:
3183 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3184 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3185 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3186 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3188 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3189 error_at (OMP_CLAUSE_LOCATION (c),
3190 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3191 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3192 return false;
3194 break;
3195 case GIMPLE_OMP_ORDERED:
3196 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3197 c; c = OMP_CLAUSE_CHAIN (c))
3199 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3201 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3202 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3203 continue;
3205 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3206 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3207 || kind == OMP_CLAUSE_DEPEND_SINK)
3209 tree oclause;
3210 /* Look for containing ordered(N) loop. */
3211 if (ctx == NULL
3212 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3213 || (oclause
3214 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3215 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3217 error_at (OMP_CLAUSE_LOCATION (c),
3218 "%<ordered%> construct with %<depend%> clause "
3219 "must be closely nested inside an %<ordered%> "
3220 "loop");
3221 return false;
3223 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3225 error_at (OMP_CLAUSE_LOCATION (c),
3226 "%<ordered%> construct with %<depend%> clause "
3227 "must be closely nested inside a loop with "
3228 "%<ordered%> clause with a parameter");
3229 return false;
3232 else
3234 error_at (OMP_CLAUSE_LOCATION (c),
3235 "invalid depend kind in omp %<ordered%> %<depend%>");
3236 return false;
3239 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3240 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3242 /* ordered simd must be closely nested inside of simd region,
3243 and simd region must not encounter constructs other than
3244 ordered simd, therefore ordered simd may be either orphaned,
3245 or ctx->stmt must be simd. The latter case is handled already
3246 earlier. */
3247 if (ctx != NULL)
3249 error_at (gimple_location (stmt),
3250 "%<ordered%> %<simd%> must be closely nested inside "
3251 "%<simd%> region");
3252 return false;
3255 for (; ctx != NULL; ctx = ctx->outer)
3256 switch (gimple_code (ctx->stmt))
3258 case GIMPLE_OMP_CRITICAL:
3259 case GIMPLE_OMP_TASK:
3260 case GIMPLE_OMP_ORDERED:
3261 ordered_in_taskloop:
3262 error_at (gimple_location (stmt),
3263 "%<ordered%> region may not be closely nested inside "
3264 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3265 "%<taskloop%> region");
3266 return false;
3267 case GIMPLE_OMP_FOR:
3268 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3269 goto ordered_in_taskloop;
3270 tree o;
3271 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3272 OMP_CLAUSE_ORDERED);
3273 if (o == NULL)
3275 error_at (gimple_location (stmt),
3276 "%<ordered%> region must be closely nested inside "
3277 "a loop region with an %<ordered%> clause");
3278 return false;
3280 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3281 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3283 error_at (gimple_location (stmt),
3284 "%<ordered%> region without %<depend%> clause may "
3285 "not be closely nested inside a loop region with "
3286 "an %<ordered%> clause with a parameter");
3287 return false;
3289 return true;
3290 case GIMPLE_OMP_TARGET:
3291 if (gimple_omp_target_kind (ctx->stmt)
3292 != GF_OMP_TARGET_KIND_REGION)
3293 break;
3294 /* FALLTHRU */
3295 case GIMPLE_OMP_PARALLEL:
3296 case GIMPLE_OMP_TEAMS:
3297 error_at (gimple_location (stmt),
3298 "%<ordered%> region must be closely nested inside "
3299 "a loop region with an %<ordered%> clause");
3300 return false;
3301 default:
3302 break;
3304 break;
3305 case GIMPLE_OMP_CRITICAL:
3307 tree this_stmt_name
3308 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3309 for (; ctx != NULL; ctx = ctx->outer)
3310 if (gomp_critical *other_crit
3311 = dyn_cast <gomp_critical *> (ctx->stmt))
3312 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3314 error_at (gimple_location (stmt),
3315 "%<critical%> region may not be nested inside "
3316 "a %<critical%> region with the same name");
3317 return false;
3320 break;
3321 case GIMPLE_OMP_TEAMS:
3322 if (ctx == NULL)
3323 break;
3324 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3325 || (gimple_omp_target_kind (ctx->stmt)
3326 != GF_OMP_TARGET_KIND_REGION))
3328 /* Teams construct can appear either strictly nested inside of
3329 target construct with no intervening stmts, or can be encountered
3330 only by initial task (so must not appear inside any OpenMP
3331 construct. */
3332 error_at (gimple_location (stmt),
3333 "%<teams%> construct must be closely nested inside of "
3334 "%<target%> construct or not nested in any OpenMP "
3335 "construct");
3336 return false;
3338 break;
3339 case GIMPLE_OMP_TARGET:
3340 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3341 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3342 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3343 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3345 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3346 error_at (OMP_CLAUSE_LOCATION (c),
3347 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3348 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3349 return false;
3351 if (is_gimple_omp_offloaded (stmt)
3352 && oacc_get_fn_attrib (cfun->decl) != NULL)
3354 error_at (gimple_location (stmt),
3355 "OpenACC region inside of OpenACC routine, nested "
3356 "parallelism not supported yet");
3357 return false;
3359 for (; ctx != NULL; ctx = ctx->outer)
3361 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3363 if (is_gimple_omp (stmt)
3364 && is_gimple_omp_oacc (stmt)
3365 && is_gimple_omp (ctx->stmt))
3367 error_at (gimple_location (stmt),
3368 "OpenACC construct inside of non-OpenACC region");
3369 return false;
3371 continue;
3374 const char *stmt_name, *ctx_stmt_name;
3375 switch (gimple_omp_target_kind (stmt))
3377 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3378 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3379 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3380 case GF_OMP_TARGET_KIND_ENTER_DATA:
3381 stmt_name = "target enter data"; break;
3382 case GF_OMP_TARGET_KIND_EXIT_DATA:
3383 stmt_name = "target exit data"; break;
3384 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3385 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3386 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3387 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3388 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3389 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3390 stmt_name = "enter/exit data"; break;
3391 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3392 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3393 break;
3394 default: gcc_unreachable ();
3396 switch (gimple_omp_target_kind (ctx->stmt))
3398 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3399 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3400 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3401 ctx_stmt_name = "parallel"; break;
3402 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3403 ctx_stmt_name = "kernels"; break;
3404 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3405 ctx_stmt_name = "serial"; break;
3406 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3407 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3408 ctx_stmt_name = "host_data"; break;
3409 default: gcc_unreachable ();
3412 /* OpenACC/OpenMP mismatch? */
3413 if (is_gimple_omp_oacc (stmt)
3414 != is_gimple_omp_oacc (ctx->stmt))
3416 error_at (gimple_location (stmt),
3417 "%s %qs construct inside of %s %qs region",
3418 (is_gimple_omp_oacc (stmt)
3419 ? "OpenACC" : "OpenMP"), stmt_name,
3420 (is_gimple_omp_oacc (ctx->stmt)
3421 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3422 return false;
3424 if (is_gimple_omp_offloaded (ctx->stmt))
3426 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3427 if (is_gimple_omp_oacc (ctx->stmt))
3429 error_at (gimple_location (stmt),
3430 "%qs construct inside of %qs region",
3431 stmt_name, ctx_stmt_name);
3432 return false;
3434 else
3436 warning_at (gimple_location (stmt), 0,
3437 "%qs construct inside of %qs region",
3438 stmt_name, ctx_stmt_name);
3442 break;
3443 default:
3444 break;
3446 return true;
3450 /* Helper function scan_omp.
3452 Callback for walk_tree or operators in walk_gimple_stmt used to
3453 scan for OMP directives in TP. */
3455 static tree
3456 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3458 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3459 omp_context *ctx = (omp_context *) wi->info;
3460 tree t = *tp;
3462 switch (TREE_CODE (t))
3464 case VAR_DECL:
3465 case PARM_DECL:
3466 case LABEL_DECL:
3467 case RESULT_DECL:
3468 if (ctx)
3470 tree repl = remap_decl (t, &ctx->cb);
3471 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3472 *tp = repl;
3474 break;
3476 default:
3477 if (ctx && TYPE_P (t))
3478 *tp = remap_type (t, &ctx->cb);
3479 else if (!DECL_P (t))
3481 *walk_subtrees = 1;
3482 if (ctx)
3484 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3485 if (tem != TREE_TYPE (t))
3487 if (TREE_CODE (t) == INTEGER_CST)
3488 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3489 else
3490 TREE_TYPE (t) = tem;
3494 break;
3497 return NULL_TREE;
3500 /* Return true if FNDECL is a setjmp or a longjmp. */
3502 static bool
3503 setjmp_or_longjmp_p (const_tree fndecl)
3505 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3506 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3507 return true;
3509 tree declname = DECL_NAME (fndecl);
3510 if (!declname
3511 || (DECL_CONTEXT (fndecl) != NULL_TREE
3512 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3513 || !TREE_PUBLIC (fndecl))
3514 return false;
3516 const char *name = IDENTIFIER_POINTER (declname);
3517 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3520 /* Return true if FNDECL is an omp_* runtime API call. */
3522 static bool
3523 omp_runtime_api_call (const_tree fndecl)
3525 tree declname = DECL_NAME (fndecl);
3526 if (!declname
3527 || (DECL_CONTEXT (fndecl) != NULL_TREE
3528 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3529 || !TREE_PUBLIC (fndecl))
3530 return false;
3532 const char *name = IDENTIFIER_POINTER (declname);
3533 if (strncmp (name, "omp_", 4) != 0)
3534 return false;
3536 static const char *omp_runtime_apis[] =
3538 /* This array has 3 sections. First omp_* calls that don't
3539 have any suffixes. */
3540 "target_alloc",
3541 "target_associate_ptr",
3542 "target_disassociate_ptr",
3543 "target_free",
3544 "target_is_present",
3545 "target_memcpy",
3546 "target_memcpy_rect",
3547 NULL,
3548 /* Now omp_* calls that are available as omp_* and omp_*_. */
3549 "capture_affinity",
3550 "destroy_lock",
3551 "destroy_nest_lock",
3552 "display_affinity",
3553 "get_active_level",
3554 "get_affinity_format",
3555 "get_cancellation",
3556 "get_default_device",
3557 "get_dynamic",
3558 "get_initial_device",
3559 "get_level",
3560 "get_max_active_levels",
3561 "get_max_task_priority",
3562 "get_max_threads",
3563 "get_nested",
3564 "get_num_devices",
3565 "get_num_places",
3566 "get_num_procs",
3567 "get_num_teams",
3568 "get_num_threads",
3569 "get_partition_num_places",
3570 "get_place_num",
3571 "get_proc_bind",
3572 "get_team_num",
3573 "get_thread_limit",
3574 "get_thread_num",
3575 "get_wtick",
3576 "get_wtime",
3577 "in_final",
3578 "in_parallel",
3579 "init_lock",
3580 "init_nest_lock",
3581 "is_initial_device",
3582 "pause_resource",
3583 "pause_resource_all",
3584 "set_affinity_format",
3585 "set_lock",
3586 "set_nest_lock",
3587 "test_lock",
3588 "test_nest_lock",
3589 "unset_lock",
3590 "unset_nest_lock",
3591 NULL,
3592 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3593 "get_ancestor_thread_num",
3594 "get_partition_place_nums",
3595 "get_place_num_procs",
3596 "get_place_proc_ids",
3597 "get_schedule",
3598 "get_team_size",
3599 "set_default_device",
3600 "set_dynamic",
3601 "set_max_active_levels",
3602 "set_nested",
3603 "set_num_threads",
3604 "set_schedule"
3607 int mode = 0;
3608 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3610 if (omp_runtime_apis[i] == NULL)
3612 mode++;
3613 continue;
3615 size_t len = strlen (omp_runtime_apis[i]);
3616 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3617 && (name[4 + len] == '\0'
3618 || (mode > 0
3619 && name[4 + len] == '_'
3620 && (name[4 + len + 1] == '\0'
3621 || (mode > 1
3622 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3623 return true;
3625 return false;
3628 /* Helper function for scan_omp.
3630 Callback for walk_gimple_stmt used to scan for OMP directives in
3631 the current statement in GSI. */
3633 static tree
3634 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3635 struct walk_stmt_info *wi)
3637 gimple *stmt = gsi_stmt (*gsi);
3638 omp_context *ctx = (omp_context *) wi->info;
3640 if (gimple_has_location (stmt))
3641 input_location = gimple_location (stmt);
3643 /* Check the nesting restrictions. */
3644 bool remove = false;
3645 if (is_gimple_omp (stmt))
3646 remove = !check_omp_nesting_restrictions (stmt, ctx);
3647 else if (is_gimple_call (stmt))
3649 tree fndecl = gimple_call_fndecl (stmt);
3650 if (fndecl)
3652 if (ctx
3653 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3654 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3655 && setjmp_or_longjmp_p (fndecl)
3656 && !ctx->loop_p)
3658 remove = true;
3659 error_at (gimple_location (stmt),
3660 "setjmp/longjmp inside %<simd%> construct");
3662 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3663 switch (DECL_FUNCTION_CODE (fndecl))
3665 case BUILT_IN_GOMP_BARRIER:
3666 case BUILT_IN_GOMP_CANCEL:
3667 case BUILT_IN_GOMP_CANCELLATION_POINT:
3668 case BUILT_IN_GOMP_TASKYIELD:
3669 case BUILT_IN_GOMP_TASKWAIT:
3670 case BUILT_IN_GOMP_TASKGROUP_START:
3671 case BUILT_IN_GOMP_TASKGROUP_END:
3672 remove = !check_omp_nesting_restrictions (stmt, ctx);
3673 break;
3674 default:
3675 break;
3677 else if (ctx)
3679 omp_context *octx = ctx;
3680 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
3681 octx = ctx->outer;
3682 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
3684 remove = true;
3685 error_at (gimple_location (stmt),
3686 "OpenMP runtime API call %qD in a region with "
3687 "%<order(concurrent)%> clause", fndecl);
3692 if (remove)
3694 stmt = gimple_build_nop ();
3695 gsi_replace (gsi, stmt, false);
3698 *handled_ops_p = true;
3700 switch (gimple_code (stmt))
3702 case GIMPLE_OMP_PARALLEL:
3703 taskreg_nesting_level++;
3704 scan_omp_parallel (gsi, ctx);
3705 taskreg_nesting_level--;
3706 break;
3708 case GIMPLE_OMP_TASK:
3709 taskreg_nesting_level++;
3710 scan_omp_task (gsi, ctx);
3711 taskreg_nesting_level--;
3712 break;
3714 case GIMPLE_OMP_FOR:
3715 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3716 == GF_OMP_FOR_KIND_SIMD)
3717 && gimple_omp_for_combined_into_p (stmt)
3718 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
3720 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
3721 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
3722 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
3724 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
3725 break;
3728 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3729 == GF_OMP_FOR_KIND_SIMD)
3730 && omp_maybe_offloaded_ctx (ctx)
3731 && omp_max_simt_vf ())
3732 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3733 else
3734 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3735 break;
3737 case GIMPLE_OMP_SECTIONS:
3738 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3739 break;
3741 case GIMPLE_OMP_SINGLE:
3742 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3743 break;
3745 case GIMPLE_OMP_SCAN:
3746 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
3748 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
3749 ctx->scan_inclusive = true;
3750 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
3751 ctx->scan_exclusive = true;
3753 /* FALLTHRU */
3754 case GIMPLE_OMP_SECTION:
3755 case GIMPLE_OMP_MASTER:
3756 case GIMPLE_OMP_ORDERED:
3757 case GIMPLE_OMP_CRITICAL:
3758 ctx = new_omp_context (stmt, ctx);
3759 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3760 break;
3762 case GIMPLE_OMP_TASKGROUP:
3763 ctx = new_omp_context (stmt, ctx);
3764 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3765 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3766 break;
3768 case GIMPLE_OMP_TARGET:
3769 if (is_gimple_omp_offloaded (stmt))
3771 taskreg_nesting_level++;
3772 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3773 taskreg_nesting_level--;
3775 else
3776 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3777 break;
3779 case GIMPLE_OMP_TEAMS:
3780 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3782 taskreg_nesting_level++;
3783 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3784 taskreg_nesting_level--;
3786 else
3787 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3788 break;
3790 case GIMPLE_BIND:
3792 tree var;
3794 *handled_ops_p = false;
3795 if (ctx)
3796 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3797 var ;
3798 var = DECL_CHAIN (var))
3799 insert_decl_map (&ctx->cb, var, var);
3801 break;
3802 default:
3803 *handled_ops_p = false;
3804 break;
3807 return NULL_TREE;
3811 /* Scan all the statements starting at the current statement. CTX
3812 contains context information about the OMP directives and
3813 clauses found during the scan. */
3815 static void
3816 scan_omp (gimple_seq *body_p, omp_context *ctx)
3818 location_t saved_location;
3819 struct walk_stmt_info wi;
3821 memset (&wi, 0, sizeof (wi));
3822 wi.info = ctx;
3823 wi.want_locations = true;
3825 saved_location = input_location;
3826 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3827 input_location = saved_location;
3830 /* Re-gimplification and code generation routines. */
3832 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3833 of BIND if in a method. */
3835 static void
3836 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3838 if (DECL_ARGUMENTS (current_function_decl)
3839 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3840 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3841 == POINTER_TYPE))
3843 tree vars = gimple_bind_vars (bind);
3844 for (tree *pvar = &vars; *pvar; )
3845 if (omp_member_access_dummy_var (*pvar))
3846 *pvar = DECL_CHAIN (*pvar);
3847 else
3848 pvar = &DECL_CHAIN (*pvar);
3849 gimple_bind_set_vars (bind, vars);
3853 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3854 block and its subblocks. */
3856 static void
3857 remove_member_access_dummy_vars (tree block)
3859 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3860 if (omp_member_access_dummy_var (*pvar))
3861 *pvar = DECL_CHAIN (*pvar);
3862 else
3863 pvar = &DECL_CHAIN (*pvar);
3865 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3866 remove_member_access_dummy_vars (block);
3869 /* If a context was created for STMT when it was scanned, return it. */
3871 static omp_context *
3872 maybe_lookup_ctx (gimple *stmt)
3874 splay_tree_node n;
3875 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3876 return n ? (omp_context *) n->value : NULL;
3880 /* Find the mapping for DECL in CTX or the immediately enclosing
3881 context that has a mapping for DECL.
3883 If CTX is a nested parallel directive, we may have to use the decl
3884 mappings created in CTX's parent context. Suppose that we have the
3885 following parallel nesting (variable UIDs showed for clarity):
3887 iD.1562 = 0;
3888 #omp parallel shared(iD.1562) -> outer parallel
3889 iD.1562 = iD.1562 + 1;
3891 #omp parallel shared (iD.1562) -> inner parallel
3892 iD.1562 = iD.1562 - 1;
3894 Each parallel structure will create a distinct .omp_data_s structure
3895 for copying iD.1562 in/out of the directive:
3897 outer parallel .omp_data_s.1.i -> iD.1562
3898 inner parallel .omp_data_s.2.i -> iD.1562
3900 A shared variable mapping will produce a copy-out operation before
3901 the parallel directive and a copy-in operation after it. So, in
3902 this case we would have:
3904 iD.1562 = 0;
3905 .omp_data_o.1.i = iD.1562;
3906 #omp parallel shared(iD.1562) -> outer parallel
3907 .omp_data_i.1 = &.omp_data_o.1
3908 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3910 .omp_data_o.2.i = iD.1562; -> **
3911 #omp parallel shared(iD.1562) -> inner parallel
3912 .omp_data_i.2 = &.omp_data_o.2
3913 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3916 ** This is a problem. The symbol iD.1562 cannot be referenced
3917 inside the body of the outer parallel region. But since we are
3918 emitting this copy operation while expanding the inner parallel
3919 directive, we need to access the CTX structure of the outer
3920 parallel directive to get the correct mapping:
3922 .omp_data_o.2.i = .omp_data_i.1->i
3924 Since there may be other workshare or parallel directives enclosing
3925 the parallel directive, it may be necessary to walk up the context
3926 parent chain. This is not a problem in general because nested
3927 parallelism happens only rarely. */
3929 static tree
3930 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3932 tree t;
3933 omp_context *up;
3935 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3936 t = maybe_lookup_decl (decl, up);
3938 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3940 return t ? t : decl;
3944 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3945 in outer contexts. */
3947 static tree
3948 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3950 tree t = NULL;
3951 omp_context *up;
3953 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3954 t = maybe_lookup_decl (decl, up);
3956 return t ? t : decl;
3960 /* Construct the initialization value for reduction operation OP. */
3962 tree
3963 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3965 switch (op)
3967 case PLUS_EXPR:
3968 case MINUS_EXPR:
3969 case BIT_IOR_EXPR:
3970 case BIT_XOR_EXPR:
3971 case TRUTH_OR_EXPR:
3972 case TRUTH_ORIF_EXPR:
3973 case TRUTH_XOR_EXPR:
3974 case NE_EXPR:
3975 return build_zero_cst (type);
3977 case MULT_EXPR:
3978 case TRUTH_AND_EXPR:
3979 case TRUTH_ANDIF_EXPR:
3980 case EQ_EXPR:
3981 return fold_convert_loc (loc, type, integer_one_node);
3983 case BIT_AND_EXPR:
3984 return fold_convert_loc (loc, type, integer_minus_one_node);
3986 case MAX_EXPR:
3987 if (SCALAR_FLOAT_TYPE_P (type))
3989 REAL_VALUE_TYPE max, min;
3990 if (HONOR_INFINITIES (type))
3992 real_inf (&max);
3993 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3995 else
3996 real_maxval (&min, 1, TYPE_MODE (type));
3997 return build_real (type, min);
3999 else if (POINTER_TYPE_P (type))
4001 wide_int min
4002 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4003 return wide_int_to_tree (type, min);
4005 else
4007 gcc_assert (INTEGRAL_TYPE_P (type));
4008 return TYPE_MIN_VALUE (type);
4011 case MIN_EXPR:
4012 if (SCALAR_FLOAT_TYPE_P (type))
4014 REAL_VALUE_TYPE max;
4015 if (HONOR_INFINITIES (type))
4016 real_inf (&max);
4017 else
4018 real_maxval (&max, 0, TYPE_MODE (type));
4019 return build_real (type, max);
4021 else if (POINTER_TYPE_P (type))
4023 wide_int max
4024 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4025 return wide_int_to_tree (type, max);
4027 else
4029 gcc_assert (INTEGRAL_TYPE_P (type));
4030 return TYPE_MAX_VALUE (type);
4033 default:
4034 gcc_unreachable ();
4038 /* Construct the initialization value for reduction CLAUSE. */
4040 tree
4041 omp_reduction_init (tree clause, tree type)
4043 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4044 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4047 /* Return alignment to be assumed for var in CLAUSE, which should be
4048 OMP_CLAUSE_ALIGNED. */
4050 static tree
4051 omp_clause_aligned_alignment (tree clause)
4053 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4054 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4056 /* Otherwise return implementation defined alignment. */
4057 unsigned int al = 1;
4058 opt_scalar_mode mode_iter;
4059 auto_vector_modes modes;
4060 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4061 static enum mode_class classes[]
4062 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4063 for (int i = 0; i < 4; i += 2)
4064 /* The for loop above dictates that we only walk through scalar classes. */
4065 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4067 scalar_mode mode = mode_iter.require ();
4068 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4069 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4070 continue;
4071 machine_mode alt_vmode;
4072 for (unsigned int j = 0; j < modes.length (); ++j)
4073 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4074 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4075 vmode = alt_vmode;
4077 tree type = lang_hooks.types.type_for_mode (mode, 1);
4078 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4079 continue;
4080 type = build_vector_type_for_mode (type, vmode);
4081 if (TYPE_MODE (type) != vmode)
4082 continue;
4083 if (TYPE_ALIGN_UNIT (type) > al)
4084 al = TYPE_ALIGN_UNIT (type);
4086 return build_int_cst (integer_type_node, al);
4090 /* This structure is part of the interface between lower_rec_simd_input_clauses
4091 and lower_rec_input_clauses. */
4093 class omplow_simd_context {
4094 public:
4095 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4096 tree idx;
4097 tree lane;
4098 tree lastlane;
4099 vec<tree, va_heap> simt_eargs;
4100 gimple_seq simt_dlist;
4101 poly_uint64_pod max_vf;
4102 bool is_simt;
4105 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4106 privatization. */
4108 static bool
4109 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4110 omplow_simd_context *sctx, tree &ivar,
4111 tree &lvar, tree *rvar = NULL,
4112 tree *rvar2 = NULL)
4114 if (known_eq (sctx->max_vf, 0U))
4116 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4117 if (maybe_gt (sctx->max_vf, 1U))
4119 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4120 OMP_CLAUSE_SAFELEN);
4121 if (c)
4123 poly_uint64 safe_len;
4124 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4125 || maybe_lt (safe_len, 1U))
4126 sctx->max_vf = 1;
4127 else
4128 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4131 if (maybe_gt (sctx->max_vf, 1U))
4133 sctx->idx = create_tmp_var (unsigned_type_node);
4134 sctx->lane = create_tmp_var (unsigned_type_node);
4137 if (known_eq (sctx->max_vf, 1U))
4138 return false;
4140 if (sctx->is_simt)
4142 if (is_gimple_reg (new_var))
4144 ivar = lvar = new_var;
4145 return true;
4147 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4148 ivar = lvar = create_tmp_var (type);
4149 TREE_ADDRESSABLE (ivar) = 1;
4150 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4151 NULL, DECL_ATTRIBUTES (ivar));
4152 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4153 tree clobber = build_clobber (type);
4154 gimple *g = gimple_build_assign (ivar, clobber);
4155 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4157 else
4159 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4160 tree avar = create_tmp_var_raw (atype);
4161 if (TREE_ADDRESSABLE (new_var))
4162 TREE_ADDRESSABLE (avar) = 1;
4163 DECL_ATTRIBUTES (avar)
4164 = tree_cons (get_identifier ("omp simd array"), NULL,
4165 DECL_ATTRIBUTES (avar));
4166 gimple_add_tmp_var (avar);
4167 tree iavar = avar;
4168 if (rvar && !ctx->for_simd_scan_phase)
4170 /* For inscan reductions, create another array temporary,
4171 which will hold the reduced value. */
4172 iavar = create_tmp_var_raw (atype);
4173 if (TREE_ADDRESSABLE (new_var))
4174 TREE_ADDRESSABLE (iavar) = 1;
4175 DECL_ATTRIBUTES (iavar)
4176 = tree_cons (get_identifier ("omp simd array"), NULL,
4177 tree_cons (get_identifier ("omp simd inscan"), NULL,
4178 DECL_ATTRIBUTES (iavar)));
4179 gimple_add_tmp_var (iavar);
4180 ctx->cb.decl_map->put (avar, iavar);
4181 if (sctx->lastlane == NULL_TREE)
4182 sctx->lastlane = create_tmp_var (unsigned_type_node);
4183 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4184 sctx->lastlane, NULL_TREE, NULL_TREE);
4185 TREE_THIS_NOTRAP (*rvar) = 1;
4187 if (ctx->scan_exclusive)
4189 /* And for exclusive scan yet another one, which will
4190 hold the value during the scan phase. */
4191 tree savar = create_tmp_var_raw (atype);
4192 if (TREE_ADDRESSABLE (new_var))
4193 TREE_ADDRESSABLE (savar) = 1;
4194 DECL_ATTRIBUTES (savar)
4195 = tree_cons (get_identifier ("omp simd array"), NULL,
4196 tree_cons (get_identifier ("omp simd inscan "
4197 "exclusive"), NULL,
4198 DECL_ATTRIBUTES (savar)));
4199 gimple_add_tmp_var (savar);
4200 ctx->cb.decl_map->put (iavar, savar);
4201 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4202 sctx->idx, NULL_TREE, NULL_TREE);
4203 TREE_THIS_NOTRAP (*rvar2) = 1;
4206 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4207 NULL_TREE, NULL_TREE);
4208 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4209 NULL_TREE, NULL_TREE);
4210 TREE_THIS_NOTRAP (ivar) = 1;
4211 TREE_THIS_NOTRAP (lvar) = 1;
4213 if (DECL_P (new_var))
4215 SET_DECL_VALUE_EXPR (new_var, lvar);
4216 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4218 return true;
4221 /* Helper function of lower_rec_input_clauses. For a reference
4222 in simd reduction, add an underlying variable it will reference. */
4224 static void
4225 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4227 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4228 if (TREE_CONSTANT (z))
4230 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4231 get_name (new_vard));
4232 gimple_add_tmp_var (z);
4233 TREE_ADDRESSABLE (z) = 1;
4234 z = build_fold_addr_expr_loc (loc, z);
4235 gimplify_assign (new_vard, z, ilist);
4239 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4240 code to emit (type) (tskred_temp[idx]). */
4242 static tree
4243 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4244 unsigned idx)
4246 unsigned HOST_WIDE_INT sz
4247 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4248 tree r = build2 (MEM_REF, pointer_sized_int_node,
4249 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4250 idx * sz));
4251 tree v = create_tmp_var (pointer_sized_int_node);
4252 gimple *g = gimple_build_assign (v, r);
4253 gimple_seq_add_stmt (ilist, g);
4254 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4256 v = create_tmp_var (type);
4257 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4258 gimple_seq_add_stmt (ilist, g);
4260 return v;
4263 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4264 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4265 private variables. Initialization statements go in ILIST, while calls
4266 to destructors go in DLIST. */
4268 static void
4269 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4270 omp_context *ctx, struct omp_for_data *fd)
4272 tree c, copyin_seq, x, ptr;
4273 bool copyin_by_ref = false;
4274 bool lastprivate_firstprivate = false;
4275 bool reduction_omp_orig_ref = false;
4276 int pass;
4277 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4278 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4279 omplow_simd_context sctx = omplow_simd_context ();
4280 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4281 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4282 gimple_seq llist[4] = { };
4283 tree nonconst_simd_if = NULL_TREE;
4285 copyin_seq = NULL;
4286 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4288 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4289 with data sharing clauses referencing variable sized vars. That
4290 is unnecessarily hard to support and very unlikely to result in
4291 vectorized code anyway. */
4292 if (is_simd)
4293 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4294 switch (OMP_CLAUSE_CODE (c))
4296 case OMP_CLAUSE_LINEAR:
4297 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4298 sctx.max_vf = 1;
4299 /* FALLTHRU */
4300 case OMP_CLAUSE_PRIVATE:
4301 case OMP_CLAUSE_FIRSTPRIVATE:
4302 case OMP_CLAUSE_LASTPRIVATE:
4303 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4304 sctx.max_vf = 1;
4305 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4307 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4308 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4309 sctx.max_vf = 1;
4311 break;
4312 case OMP_CLAUSE_REDUCTION:
4313 case OMP_CLAUSE_IN_REDUCTION:
4314 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4315 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4316 sctx.max_vf = 1;
4317 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4319 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4320 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4321 sctx.max_vf = 1;
4323 break;
4324 case OMP_CLAUSE_IF:
4325 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4326 sctx.max_vf = 1;
4327 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4328 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4329 break;
4330 case OMP_CLAUSE_SIMDLEN:
4331 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4332 sctx.max_vf = 1;
4333 break;
4334 case OMP_CLAUSE__CONDTEMP_:
4335 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4336 if (sctx.is_simt)
4337 sctx.max_vf = 1;
4338 break;
4339 default:
4340 continue;
4343 /* Add a placeholder for simduid. */
4344 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4345 sctx.simt_eargs.safe_push (NULL_TREE);
4347 unsigned task_reduction_cnt = 0;
4348 unsigned task_reduction_cntorig = 0;
4349 unsigned task_reduction_cnt_full = 0;
4350 unsigned task_reduction_cntorig_full = 0;
4351 unsigned task_reduction_other_cnt = 0;
4352 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4353 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4354 /* Do all the fixed sized types in the first pass, and the variable sized
4355 types in the second pass. This makes sure that the scalar arguments to
4356 the variable sized types are processed before we use them in the
4357 variable sized operations. For task reductions we use 4 passes, in the
4358 first two we ignore them, in the third one gather arguments for
4359 GOMP_task_reduction_remap call and in the last pass actually handle
4360 the task reductions. */
4361 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4362 ? 4 : 2); ++pass)
4364 if (pass == 2 && task_reduction_cnt)
4366 tskred_atype
4367 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4368 + task_reduction_cntorig);
4369 tskred_avar = create_tmp_var_raw (tskred_atype);
4370 gimple_add_tmp_var (tskred_avar);
4371 TREE_ADDRESSABLE (tskred_avar) = 1;
4372 task_reduction_cnt_full = task_reduction_cnt;
4373 task_reduction_cntorig_full = task_reduction_cntorig;
4375 else if (pass == 3 && task_reduction_cnt)
4377 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4378 gimple *g
4379 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4380 size_int (task_reduction_cntorig),
4381 build_fold_addr_expr (tskred_avar));
4382 gimple_seq_add_stmt (ilist, g);
4384 if (pass == 3 && task_reduction_other_cnt)
4386 /* For reduction clauses, build
4387 tskred_base = (void *) tskred_temp[2]
4388 + omp_get_thread_num () * tskred_temp[1]
4389 or if tskred_temp[1] is known to be constant, that constant
4390 directly. This is the start of the private reduction copy block
4391 for the current thread. */
4392 tree v = create_tmp_var (integer_type_node);
4393 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4394 gimple *g = gimple_build_call (x, 0);
4395 gimple_call_set_lhs (g, v);
4396 gimple_seq_add_stmt (ilist, g);
4397 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4398 tskred_temp = OMP_CLAUSE_DECL (c);
4399 if (is_taskreg_ctx (ctx))
4400 tskred_temp = lookup_decl (tskred_temp, ctx);
4401 tree v2 = create_tmp_var (sizetype);
4402 g = gimple_build_assign (v2, NOP_EXPR, v);
4403 gimple_seq_add_stmt (ilist, g);
4404 if (ctx->task_reductions[0])
4405 v = fold_convert (sizetype, ctx->task_reductions[0]);
4406 else
4407 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4408 tree v3 = create_tmp_var (sizetype);
4409 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4410 gimple_seq_add_stmt (ilist, g);
4411 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4412 tskred_base = create_tmp_var (ptr_type_node);
4413 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4414 gimple_seq_add_stmt (ilist, g);
4416 task_reduction_cnt = 0;
4417 task_reduction_cntorig = 0;
4418 task_reduction_other_cnt = 0;
4419 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4421 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4422 tree var, new_var;
4423 bool by_ref;
4424 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4425 bool task_reduction_p = false;
4426 bool task_reduction_needs_orig_p = false;
4427 tree cond = NULL_TREE;
4429 switch (c_kind)
4431 case OMP_CLAUSE_PRIVATE:
4432 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4433 continue;
4434 break;
4435 case OMP_CLAUSE_SHARED:
4436 /* Ignore shared directives in teams construct inside
4437 of target construct. */
4438 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4439 && !is_host_teams_ctx (ctx))
4440 continue;
4441 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4443 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4444 || is_global_var (OMP_CLAUSE_DECL (c)));
4445 continue;
4447 case OMP_CLAUSE_FIRSTPRIVATE:
4448 case OMP_CLAUSE_COPYIN:
4449 break;
4450 case OMP_CLAUSE_LINEAR:
4451 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4452 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4453 lastprivate_firstprivate = true;
4454 break;
4455 case OMP_CLAUSE_REDUCTION:
4456 case OMP_CLAUSE_IN_REDUCTION:
4457 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4459 task_reduction_p = true;
4460 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4462 task_reduction_other_cnt++;
4463 if (pass == 2)
4464 continue;
4466 else
4467 task_reduction_cnt++;
4468 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4470 var = OMP_CLAUSE_DECL (c);
4471 /* If var is a global variable that isn't privatized
4472 in outer contexts, we don't need to look up the
4473 original address, it is always the address of the
4474 global variable itself. */
4475 if (!DECL_P (var)
4476 || omp_is_reference (var)
4477 || !is_global_var
4478 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4480 task_reduction_needs_orig_p = true;
4481 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4482 task_reduction_cntorig++;
4486 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4487 reduction_omp_orig_ref = true;
4488 break;
4489 case OMP_CLAUSE__REDUCTEMP_:
4490 if (!is_taskreg_ctx (ctx))
4491 continue;
4492 /* FALLTHRU */
4493 case OMP_CLAUSE__LOOPTEMP_:
4494 /* Handle _looptemp_/_reductemp_ clauses only on
4495 parallel/task. */
4496 if (fd)
4497 continue;
4498 break;
4499 case OMP_CLAUSE_LASTPRIVATE:
4500 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4502 lastprivate_firstprivate = true;
4503 if (pass != 0 || is_taskloop_ctx (ctx))
4504 continue;
4506 /* Even without corresponding firstprivate, if
4507 decl is Fortran allocatable, it needs outer var
4508 reference. */
4509 else if (pass == 0
4510 && lang_hooks.decls.omp_private_outer_ref
4511 (OMP_CLAUSE_DECL (c)))
4512 lastprivate_firstprivate = true;
4513 break;
4514 case OMP_CLAUSE_ALIGNED:
4515 if (pass != 1)
4516 continue;
4517 var = OMP_CLAUSE_DECL (c);
4518 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4519 && !is_global_var (var))
4521 new_var = maybe_lookup_decl (var, ctx);
4522 if (new_var == NULL_TREE)
4523 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4524 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4525 tree alarg = omp_clause_aligned_alignment (c);
4526 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4527 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4528 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4529 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4530 gimplify_and_add (x, ilist);
4532 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4533 && is_global_var (var))
4535 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4536 new_var = lookup_decl (var, ctx);
4537 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4538 t = build_fold_addr_expr_loc (clause_loc, t);
4539 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4540 tree alarg = omp_clause_aligned_alignment (c);
4541 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4542 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4543 t = fold_convert_loc (clause_loc, ptype, t);
4544 x = create_tmp_var (ptype);
4545 t = build2 (MODIFY_EXPR, ptype, x, t);
4546 gimplify_and_add (t, ilist);
4547 t = build_simple_mem_ref_loc (clause_loc, x);
4548 SET_DECL_VALUE_EXPR (new_var, t);
4549 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4551 continue;
4552 case OMP_CLAUSE__CONDTEMP_:
4553 if (is_parallel_ctx (ctx)
4554 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4555 break;
4556 continue;
4557 default:
4558 continue;
4561 if (task_reduction_p != (pass >= 2))
4562 continue;
4564 new_var = var = OMP_CLAUSE_DECL (c);
4565 if ((c_kind == OMP_CLAUSE_REDUCTION
4566 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4567 && TREE_CODE (var) == MEM_REF)
4569 var = TREE_OPERAND (var, 0);
4570 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4571 var = TREE_OPERAND (var, 0);
4572 if (TREE_CODE (var) == INDIRECT_REF
4573 || TREE_CODE (var) == ADDR_EXPR)
4574 var = TREE_OPERAND (var, 0);
4575 if (is_variable_sized (var))
4577 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4578 var = DECL_VALUE_EXPR (var);
4579 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4580 var = TREE_OPERAND (var, 0);
4581 gcc_assert (DECL_P (var));
4583 new_var = var;
4585 if (c_kind != OMP_CLAUSE_COPYIN)
4586 new_var = lookup_decl (var, ctx);
4588 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4590 if (pass != 0)
4591 continue;
4593 /* C/C++ array section reductions. */
4594 else if ((c_kind == OMP_CLAUSE_REDUCTION
4595 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4596 && var != OMP_CLAUSE_DECL (c))
4598 if (pass == 0)
4599 continue;
4601 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4602 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4604 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4606 tree b = TREE_OPERAND (orig_var, 1);
4607 b = maybe_lookup_decl (b, ctx);
4608 if (b == NULL)
4610 b = TREE_OPERAND (orig_var, 1);
4611 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4613 if (integer_zerop (bias))
4614 bias = b;
4615 else
4617 bias = fold_convert_loc (clause_loc,
4618 TREE_TYPE (b), bias);
4619 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4620 TREE_TYPE (b), b, bias);
4622 orig_var = TREE_OPERAND (orig_var, 0);
4624 if (pass == 2)
4626 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4627 if (is_global_var (out)
4628 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4629 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4630 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4631 != POINTER_TYPE)))
4632 x = var;
4633 else
4635 bool by_ref = use_pointer_for_field (var, NULL);
4636 x = build_receiver_ref (var, by_ref, ctx);
4637 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4638 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4639 == POINTER_TYPE))
4640 x = build_fold_addr_expr (x);
4642 if (TREE_CODE (orig_var) == INDIRECT_REF)
4643 x = build_simple_mem_ref (x);
4644 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4646 if (var == TREE_OPERAND (orig_var, 0))
4647 x = build_fold_addr_expr (x);
4649 bias = fold_convert (sizetype, bias);
4650 x = fold_convert (ptr_type_node, x);
4651 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4652 TREE_TYPE (x), x, bias);
4653 unsigned cnt = task_reduction_cnt - 1;
4654 if (!task_reduction_needs_orig_p)
4655 cnt += (task_reduction_cntorig_full
4656 - task_reduction_cntorig);
4657 else
4658 cnt = task_reduction_cntorig - 1;
4659 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4660 size_int (cnt), NULL_TREE, NULL_TREE);
4661 gimplify_assign (r, x, ilist);
4662 continue;
4665 if (TREE_CODE (orig_var) == INDIRECT_REF
4666 || TREE_CODE (orig_var) == ADDR_EXPR)
4667 orig_var = TREE_OPERAND (orig_var, 0);
4668 tree d = OMP_CLAUSE_DECL (c);
4669 tree type = TREE_TYPE (d);
4670 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4671 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4672 const char *name = get_name (orig_var);
4673 if (pass == 3)
4675 tree xv = create_tmp_var (ptr_type_node);
4676 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4678 unsigned cnt = task_reduction_cnt - 1;
4679 if (!task_reduction_needs_orig_p)
4680 cnt += (task_reduction_cntorig_full
4681 - task_reduction_cntorig);
4682 else
4683 cnt = task_reduction_cntorig - 1;
4684 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4685 size_int (cnt), NULL_TREE, NULL_TREE);
4687 gimple *g = gimple_build_assign (xv, x);
4688 gimple_seq_add_stmt (ilist, g);
4690 else
4692 unsigned int idx = *ctx->task_reduction_map->get (c);
4693 tree off;
4694 if (ctx->task_reductions[1 + idx])
4695 off = fold_convert (sizetype,
4696 ctx->task_reductions[1 + idx]);
4697 else
4698 off = task_reduction_read (ilist, tskred_temp, sizetype,
4699 7 + 3 * idx + 1);
4700 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4701 tskred_base, off);
4702 gimple_seq_add_stmt (ilist, g);
4704 x = fold_convert (build_pointer_type (boolean_type_node),
4705 xv);
4706 if (TREE_CONSTANT (v))
4707 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4708 TYPE_SIZE_UNIT (type));
4709 else
4711 tree t = maybe_lookup_decl (v, ctx);
4712 if (t)
4713 v = t;
4714 else
4715 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4716 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4717 fb_rvalue);
4718 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4719 TREE_TYPE (v), v,
4720 build_int_cst (TREE_TYPE (v), 1));
4721 t = fold_build2_loc (clause_loc, MULT_EXPR,
4722 TREE_TYPE (v), t,
4723 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4724 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4726 cond = create_tmp_var (TREE_TYPE (x));
4727 gimplify_assign (cond, x, ilist);
4728 x = xv;
4730 else if (TREE_CONSTANT (v))
4732 x = create_tmp_var_raw (type, name);
4733 gimple_add_tmp_var (x);
4734 TREE_ADDRESSABLE (x) = 1;
4735 x = build_fold_addr_expr_loc (clause_loc, x);
4737 else
4739 tree atmp
4740 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4741 tree t = maybe_lookup_decl (v, ctx);
4742 if (t)
4743 v = t;
4744 else
4745 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4746 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4747 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4748 TREE_TYPE (v), v,
4749 build_int_cst (TREE_TYPE (v), 1));
4750 t = fold_build2_loc (clause_loc, MULT_EXPR,
4751 TREE_TYPE (v), t,
4752 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4753 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4754 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4757 tree ptype = build_pointer_type (TREE_TYPE (type));
4758 x = fold_convert_loc (clause_loc, ptype, x);
4759 tree y = create_tmp_var (ptype, name);
4760 gimplify_assign (y, x, ilist);
4761 x = y;
4762 tree yb = y;
4764 if (!integer_zerop (bias))
4766 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4767 bias);
4768 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4770 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4771 pointer_sized_int_node, yb, bias);
4772 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4773 yb = create_tmp_var (ptype, name);
4774 gimplify_assign (yb, x, ilist);
4775 x = yb;
4778 d = TREE_OPERAND (d, 0);
4779 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4780 d = TREE_OPERAND (d, 0);
4781 if (TREE_CODE (d) == ADDR_EXPR)
4783 if (orig_var != var)
4785 gcc_assert (is_variable_sized (orig_var));
4786 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4788 gimplify_assign (new_var, x, ilist);
4789 tree new_orig_var = lookup_decl (orig_var, ctx);
4790 tree t = build_fold_indirect_ref (new_var);
4791 DECL_IGNORED_P (new_var) = 0;
4792 TREE_THIS_NOTRAP (t) = 1;
4793 SET_DECL_VALUE_EXPR (new_orig_var, t);
4794 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4796 else
4798 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4799 build_int_cst (ptype, 0));
4800 SET_DECL_VALUE_EXPR (new_var, x);
4801 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4804 else
4806 gcc_assert (orig_var == var);
4807 if (TREE_CODE (d) == INDIRECT_REF)
4809 x = create_tmp_var (ptype, name);
4810 TREE_ADDRESSABLE (x) = 1;
4811 gimplify_assign (x, yb, ilist);
4812 x = build_fold_addr_expr_loc (clause_loc, x);
4814 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4815 gimplify_assign (new_var, x, ilist);
4817 /* GOMP_taskgroup_reduction_register memsets the whole
4818 array to zero. If the initializer is zero, we don't
4819 need to initialize it again, just mark it as ever
4820 used unconditionally, i.e. cond = true. */
4821 if (cond
4822 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4823 && initializer_zerop (omp_reduction_init (c,
4824 TREE_TYPE (type))))
4826 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4827 boolean_true_node);
4828 gimple_seq_add_stmt (ilist, g);
4829 continue;
4831 tree end = create_artificial_label (UNKNOWN_LOCATION);
4832 if (cond)
4834 gimple *g;
4835 if (!is_parallel_ctx (ctx))
4837 tree condv = create_tmp_var (boolean_type_node);
4838 g = gimple_build_assign (condv,
4839 build_simple_mem_ref (cond));
4840 gimple_seq_add_stmt (ilist, g);
4841 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4842 g = gimple_build_cond (NE_EXPR, condv,
4843 boolean_false_node, end, lab1);
4844 gimple_seq_add_stmt (ilist, g);
4845 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4847 g = gimple_build_assign (build_simple_mem_ref (cond),
4848 boolean_true_node);
4849 gimple_seq_add_stmt (ilist, g);
4852 tree y1 = create_tmp_var (ptype);
4853 gimplify_assign (y1, y, ilist);
4854 tree i2 = NULL_TREE, y2 = NULL_TREE;
4855 tree body2 = NULL_TREE, end2 = NULL_TREE;
4856 tree y3 = NULL_TREE, y4 = NULL_TREE;
4857 if (task_reduction_needs_orig_p)
4859 y3 = create_tmp_var (ptype);
4860 tree ref;
4861 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4862 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4863 size_int (task_reduction_cnt_full
4864 + task_reduction_cntorig - 1),
4865 NULL_TREE, NULL_TREE);
4866 else
4868 unsigned int idx = *ctx->task_reduction_map->get (c);
4869 ref = task_reduction_read (ilist, tskred_temp, ptype,
4870 7 + 3 * idx);
4872 gimplify_assign (y3, ref, ilist);
4874 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4876 if (pass != 3)
4878 y2 = create_tmp_var (ptype);
4879 gimplify_assign (y2, y, ilist);
4881 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4883 tree ref = build_outer_var_ref (var, ctx);
4884 /* For ref build_outer_var_ref already performs this. */
4885 if (TREE_CODE (d) == INDIRECT_REF)
4886 gcc_assert (omp_is_reference (var));
4887 else if (TREE_CODE (d) == ADDR_EXPR)
4888 ref = build_fold_addr_expr (ref);
4889 else if (omp_is_reference (var))
4890 ref = build_fold_addr_expr (ref);
4891 ref = fold_convert_loc (clause_loc, ptype, ref);
4892 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4893 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4895 y3 = create_tmp_var (ptype);
4896 gimplify_assign (y3, unshare_expr (ref), ilist);
4898 if (is_simd)
4900 y4 = create_tmp_var (ptype);
4901 gimplify_assign (y4, ref, dlist);
4905 tree i = create_tmp_var (TREE_TYPE (v));
4906 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4907 tree body = create_artificial_label (UNKNOWN_LOCATION);
4908 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4909 if (y2)
4911 i2 = create_tmp_var (TREE_TYPE (v));
4912 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4913 body2 = create_artificial_label (UNKNOWN_LOCATION);
4914 end2 = create_artificial_label (UNKNOWN_LOCATION);
4915 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4917 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4919 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4920 tree decl_placeholder
4921 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4922 SET_DECL_VALUE_EXPR (decl_placeholder,
4923 build_simple_mem_ref (y1));
4924 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4925 SET_DECL_VALUE_EXPR (placeholder,
4926 y3 ? build_simple_mem_ref (y3)
4927 : error_mark_node);
4928 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4929 x = lang_hooks.decls.omp_clause_default_ctor
4930 (c, build_simple_mem_ref (y1),
4931 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4932 if (x)
4933 gimplify_and_add (x, ilist);
4934 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4936 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4937 lower_omp (&tseq, ctx);
4938 gimple_seq_add_seq (ilist, tseq);
4940 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4941 if (is_simd)
4943 SET_DECL_VALUE_EXPR (decl_placeholder,
4944 build_simple_mem_ref (y2));
4945 SET_DECL_VALUE_EXPR (placeholder,
4946 build_simple_mem_ref (y4));
4947 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4948 lower_omp (&tseq, ctx);
4949 gimple_seq_add_seq (dlist, tseq);
4950 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4952 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4953 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4954 if (y2)
4956 x = lang_hooks.decls.omp_clause_dtor
4957 (c, build_simple_mem_ref (y2));
4958 if (x)
4959 gimplify_and_add (x, dlist);
4962 else
4964 x = omp_reduction_init (c, TREE_TYPE (type));
4965 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4967 /* reduction(-:var) sums up the partial results, so it
4968 acts identically to reduction(+:var). */
4969 if (code == MINUS_EXPR)
4970 code = PLUS_EXPR;
4972 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4973 if (is_simd)
4975 x = build2 (code, TREE_TYPE (type),
4976 build_simple_mem_ref (y4),
4977 build_simple_mem_ref (y2));
4978 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4981 gimple *g
4982 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4983 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4984 gimple_seq_add_stmt (ilist, g);
4985 if (y3)
4987 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4988 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4989 gimple_seq_add_stmt (ilist, g);
4991 g = gimple_build_assign (i, PLUS_EXPR, i,
4992 build_int_cst (TREE_TYPE (i), 1));
4993 gimple_seq_add_stmt (ilist, g);
4994 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4995 gimple_seq_add_stmt (ilist, g);
4996 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4997 if (y2)
4999 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5000 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5001 gimple_seq_add_stmt (dlist, g);
5002 if (y4)
5004 g = gimple_build_assign
5005 (y4, POINTER_PLUS_EXPR, y4,
5006 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5007 gimple_seq_add_stmt (dlist, g);
5009 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5010 build_int_cst (TREE_TYPE (i2), 1));
5011 gimple_seq_add_stmt (dlist, g);
5012 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5013 gimple_seq_add_stmt (dlist, g);
5014 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5016 continue;
5018 else if (pass == 2)
5020 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5021 x = var;
5022 else
5024 bool by_ref = use_pointer_for_field (var, ctx);
5025 x = build_receiver_ref (var, by_ref, ctx);
5027 if (!omp_is_reference (var))
5028 x = build_fold_addr_expr (x);
5029 x = fold_convert (ptr_type_node, x);
5030 unsigned cnt = task_reduction_cnt - 1;
5031 if (!task_reduction_needs_orig_p)
5032 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5033 else
5034 cnt = task_reduction_cntorig - 1;
5035 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5036 size_int (cnt), NULL_TREE, NULL_TREE);
5037 gimplify_assign (r, x, ilist);
5038 continue;
5040 else if (pass == 3)
5042 tree type = TREE_TYPE (new_var);
5043 if (!omp_is_reference (var))
5044 type = build_pointer_type (type);
5045 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5047 unsigned cnt = task_reduction_cnt - 1;
5048 if (!task_reduction_needs_orig_p)
5049 cnt += (task_reduction_cntorig_full
5050 - task_reduction_cntorig);
5051 else
5052 cnt = task_reduction_cntorig - 1;
5053 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5054 size_int (cnt), NULL_TREE, NULL_TREE);
5056 else
5058 unsigned int idx = *ctx->task_reduction_map->get (c);
5059 tree off;
5060 if (ctx->task_reductions[1 + idx])
5061 off = fold_convert (sizetype,
5062 ctx->task_reductions[1 + idx]);
5063 else
5064 off = task_reduction_read (ilist, tskred_temp, sizetype,
5065 7 + 3 * idx + 1);
5066 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5067 tskred_base, off);
5069 x = fold_convert (type, x);
5070 tree t;
5071 if (omp_is_reference (var))
5073 gimplify_assign (new_var, x, ilist);
5074 t = new_var;
5075 new_var = build_simple_mem_ref (new_var);
5077 else
5079 t = create_tmp_var (type);
5080 gimplify_assign (t, x, ilist);
5081 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5082 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5084 t = fold_convert (build_pointer_type (boolean_type_node), t);
5085 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5086 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5087 cond = create_tmp_var (TREE_TYPE (t));
5088 gimplify_assign (cond, t, ilist);
5090 else if (is_variable_sized (var))
5092 /* For variable sized types, we need to allocate the
5093 actual storage here. Call alloca and store the
5094 result in the pointer decl that we created elsewhere. */
5095 if (pass == 0)
5096 continue;
5098 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5100 gcall *stmt;
5101 tree tmp, atmp;
5103 ptr = DECL_VALUE_EXPR (new_var);
5104 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5105 ptr = TREE_OPERAND (ptr, 0);
5106 gcc_assert (DECL_P (ptr));
5107 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5109 /* void *tmp = __builtin_alloca */
5110 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5111 stmt = gimple_build_call (atmp, 2, x,
5112 size_int (DECL_ALIGN (var)));
5113 tmp = create_tmp_var_raw (ptr_type_node);
5114 gimple_add_tmp_var (tmp);
5115 gimple_call_set_lhs (stmt, tmp);
5117 gimple_seq_add_stmt (ilist, stmt);
5119 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5120 gimplify_assign (ptr, x, ilist);
5123 else if (omp_is_reference (var)
5124 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5125 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5127 /* For references that are being privatized for Fortran,
5128 allocate new backing storage for the new pointer
5129 variable. This allows us to avoid changing all the
5130 code that expects a pointer to something that expects
5131 a direct variable. */
5132 if (pass == 0)
5133 continue;
5135 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5136 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5138 x = build_receiver_ref (var, false, ctx);
5139 x = build_fold_addr_expr_loc (clause_loc, x);
5141 else if (TREE_CONSTANT (x))
5143 /* For reduction in SIMD loop, defer adding the
5144 initialization of the reference, because if we decide
5145 to use SIMD array for it, the initilization could cause
5146 expansion ICE. Ditto for other privatization clauses. */
5147 if (is_simd)
5148 x = NULL_TREE;
5149 else
5151 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5152 get_name (var));
5153 gimple_add_tmp_var (x);
5154 TREE_ADDRESSABLE (x) = 1;
5155 x = build_fold_addr_expr_loc (clause_loc, x);
5158 else
5160 tree atmp
5161 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5162 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5163 tree al = size_int (TYPE_ALIGN (rtype));
5164 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5167 if (x)
5169 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5170 gimplify_assign (new_var, x, ilist);
5173 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5175 else if ((c_kind == OMP_CLAUSE_REDUCTION
5176 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5177 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5179 if (pass == 0)
5180 continue;
5182 else if (pass != 0)
5183 continue;
5185 switch (OMP_CLAUSE_CODE (c))
5187 case OMP_CLAUSE_SHARED:
5188 /* Ignore shared directives in teams construct inside
5189 target construct. */
5190 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5191 && !is_host_teams_ctx (ctx))
5192 continue;
5193 /* Shared global vars are just accessed directly. */
5194 if (is_global_var (new_var))
5195 break;
5196 /* For taskloop firstprivate/lastprivate, represented
5197 as firstprivate and shared clause on the task, new_var
5198 is the firstprivate var. */
5199 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5200 break;
5201 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5202 needs to be delayed until after fixup_child_record_type so
5203 that we get the correct type during the dereference. */
5204 by_ref = use_pointer_for_field (var, ctx);
5205 x = build_receiver_ref (var, by_ref, ctx);
5206 SET_DECL_VALUE_EXPR (new_var, x);
5207 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5209 /* ??? If VAR is not passed by reference, and the variable
5210 hasn't been initialized yet, then we'll get a warning for
5211 the store into the omp_data_s structure. Ideally, we'd be
5212 able to notice this and not store anything at all, but
5213 we're generating code too early. Suppress the warning. */
5214 if (!by_ref)
5215 TREE_NO_WARNING (var) = 1;
5216 break;
5218 case OMP_CLAUSE__CONDTEMP_:
5219 if (is_parallel_ctx (ctx))
5221 x = build_receiver_ref (var, false, ctx);
5222 SET_DECL_VALUE_EXPR (new_var, x);
5223 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5225 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5227 x = build_zero_cst (TREE_TYPE (var));
5228 goto do_private;
5230 break;
5232 case OMP_CLAUSE_LASTPRIVATE:
5233 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5234 break;
5235 /* FALLTHRU */
5237 case OMP_CLAUSE_PRIVATE:
5238 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5239 x = build_outer_var_ref (var, ctx);
5240 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5242 if (is_task_ctx (ctx))
5243 x = build_receiver_ref (var, false, ctx);
5244 else
5245 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5247 else
5248 x = NULL;
5249 do_private:
5250 tree nx;
5251 bool copy_ctor;
5252 copy_ctor = false;
5253 nx = unshare_expr (new_var);
5254 if (is_simd
5255 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5256 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5257 copy_ctor = true;
5258 if (copy_ctor)
5259 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5260 else
5261 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5262 if (is_simd)
5264 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5265 if ((TREE_ADDRESSABLE (new_var) || nx || y
5266 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5267 && (gimple_omp_for_collapse (ctx->stmt) != 1
5268 || (gimple_omp_for_index (ctx->stmt, 0)
5269 != new_var)))
5270 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5271 || omp_is_reference (var))
5272 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5273 ivar, lvar))
5275 if (omp_is_reference (var))
5277 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5278 tree new_vard = TREE_OPERAND (new_var, 0);
5279 gcc_assert (DECL_P (new_vard));
5280 SET_DECL_VALUE_EXPR (new_vard,
5281 build_fold_addr_expr (lvar));
5282 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5285 if (nx)
5287 tree iv = unshare_expr (ivar);
5288 if (copy_ctor)
5289 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5291 else
5292 x = lang_hooks.decls.omp_clause_default_ctor (c,
5296 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5298 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5299 unshare_expr (ivar), x);
5300 nx = x;
5302 if (nx && x)
5303 gimplify_and_add (x, &llist[0]);
5304 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5305 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5307 tree v = new_var;
5308 if (!DECL_P (v))
5310 gcc_assert (TREE_CODE (v) == MEM_REF);
5311 v = TREE_OPERAND (v, 0);
5312 gcc_assert (DECL_P (v));
5314 v = *ctx->lastprivate_conditional_map->get (v);
5315 tree t = create_tmp_var (TREE_TYPE (v));
5316 tree z = build_zero_cst (TREE_TYPE (v));
5317 tree orig_v
5318 = build_outer_var_ref (var, ctx,
5319 OMP_CLAUSE_LASTPRIVATE);
5320 gimple_seq_add_stmt (dlist,
5321 gimple_build_assign (t, z));
5322 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5323 tree civar = DECL_VALUE_EXPR (v);
5324 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5325 civar = unshare_expr (civar);
5326 TREE_OPERAND (civar, 1) = sctx.idx;
5327 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5328 unshare_expr (civar));
5329 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5330 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5331 orig_v, unshare_expr (ivar)));
5332 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5333 civar);
5334 x = build3 (COND_EXPR, void_type_node, cond, x,
5335 void_node);
5336 gimple_seq tseq = NULL;
5337 gimplify_and_add (x, &tseq);
5338 if (ctx->outer)
5339 lower_omp (&tseq, ctx->outer);
5340 gimple_seq_add_seq (&llist[1], tseq);
5342 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5343 && ctx->for_simd_scan_phase)
5345 x = unshare_expr (ivar);
5346 tree orig_v
5347 = build_outer_var_ref (var, ctx,
5348 OMP_CLAUSE_LASTPRIVATE);
5349 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5350 orig_v);
5351 gimplify_and_add (x, &llist[0]);
5353 if (y)
5355 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5356 if (y)
5357 gimplify_and_add (y, &llist[1]);
5359 break;
5361 if (omp_is_reference (var))
5363 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5364 tree new_vard = TREE_OPERAND (new_var, 0);
5365 gcc_assert (DECL_P (new_vard));
5366 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5367 x = TYPE_SIZE_UNIT (type);
5368 if (TREE_CONSTANT (x))
5370 x = create_tmp_var_raw (type, get_name (var));
5371 gimple_add_tmp_var (x);
5372 TREE_ADDRESSABLE (x) = 1;
5373 x = build_fold_addr_expr_loc (clause_loc, x);
5374 x = fold_convert_loc (clause_loc,
5375 TREE_TYPE (new_vard), x);
5376 gimplify_assign (new_vard, x, ilist);
5380 if (nx)
5381 gimplify_and_add (nx, ilist);
5382 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5383 && is_simd
5384 && ctx->for_simd_scan_phase)
5386 tree orig_v = build_outer_var_ref (var, ctx,
5387 OMP_CLAUSE_LASTPRIVATE);
5388 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5389 orig_v);
5390 gimplify_and_add (x, ilist);
5392 /* FALLTHRU */
5394 do_dtor:
5395 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5396 if (x)
5397 gimplify_and_add (x, dlist);
5398 break;
5400 case OMP_CLAUSE_LINEAR:
5401 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5402 goto do_firstprivate;
5403 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5404 x = NULL;
5405 else
5406 x = build_outer_var_ref (var, ctx);
5407 goto do_private;
5409 case OMP_CLAUSE_FIRSTPRIVATE:
5410 if (is_task_ctx (ctx))
5412 if ((omp_is_reference (var)
5413 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5414 || is_variable_sized (var))
5415 goto do_dtor;
5416 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5417 ctx))
5418 || use_pointer_for_field (var, NULL))
5420 x = build_receiver_ref (var, false, ctx);
5421 SET_DECL_VALUE_EXPR (new_var, x);
5422 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5423 goto do_dtor;
5426 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5427 && omp_is_reference (var))
5429 x = build_outer_var_ref (var, ctx);
5430 gcc_assert (TREE_CODE (x) == MEM_REF
5431 && integer_zerop (TREE_OPERAND (x, 1)));
5432 x = TREE_OPERAND (x, 0);
5433 x = lang_hooks.decls.omp_clause_copy_ctor
5434 (c, unshare_expr (new_var), x);
5435 gimplify_and_add (x, ilist);
5436 goto do_dtor;
5438 do_firstprivate:
5439 x = build_outer_var_ref (var, ctx);
5440 if (is_simd)
5442 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5443 && gimple_omp_for_combined_into_p (ctx->stmt))
5445 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5446 tree stept = TREE_TYPE (t);
5447 tree ct = omp_find_clause (clauses,
5448 OMP_CLAUSE__LOOPTEMP_);
5449 gcc_assert (ct);
5450 tree l = OMP_CLAUSE_DECL (ct);
5451 tree n1 = fd->loop.n1;
5452 tree step = fd->loop.step;
5453 tree itype = TREE_TYPE (l);
5454 if (POINTER_TYPE_P (itype))
5455 itype = signed_type_for (itype);
5456 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5457 if (TYPE_UNSIGNED (itype)
5458 && fd->loop.cond_code == GT_EXPR)
5459 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5460 fold_build1 (NEGATE_EXPR, itype, l),
5461 fold_build1 (NEGATE_EXPR,
5462 itype, step));
5463 else
5464 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5465 t = fold_build2 (MULT_EXPR, stept,
5466 fold_convert (stept, l), t);
5468 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5470 if (omp_is_reference (var))
5472 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5473 tree new_vard = TREE_OPERAND (new_var, 0);
5474 gcc_assert (DECL_P (new_vard));
5475 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5476 nx = TYPE_SIZE_UNIT (type);
5477 if (TREE_CONSTANT (nx))
5479 nx = create_tmp_var_raw (type,
5480 get_name (var));
5481 gimple_add_tmp_var (nx);
5482 TREE_ADDRESSABLE (nx) = 1;
5483 nx = build_fold_addr_expr_loc (clause_loc,
5484 nx);
5485 nx = fold_convert_loc (clause_loc,
5486 TREE_TYPE (new_vard),
5487 nx);
5488 gimplify_assign (new_vard, nx, ilist);
5492 x = lang_hooks.decls.omp_clause_linear_ctor
5493 (c, new_var, x, t);
5494 gimplify_and_add (x, ilist);
5495 goto do_dtor;
5498 if (POINTER_TYPE_P (TREE_TYPE (x)))
5499 x = fold_build2 (POINTER_PLUS_EXPR,
5500 TREE_TYPE (x), x, t);
5501 else
5502 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5505 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5506 || TREE_ADDRESSABLE (new_var)
5507 || omp_is_reference (var))
5508 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5509 ivar, lvar))
5511 if (omp_is_reference (var))
5513 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5514 tree new_vard = TREE_OPERAND (new_var, 0);
5515 gcc_assert (DECL_P (new_vard));
5516 SET_DECL_VALUE_EXPR (new_vard,
5517 build_fold_addr_expr (lvar));
5518 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5520 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5522 tree iv = create_tmp_var (TREE_TYPE (new_var));
5523 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5524 gimplify_and_add (x, ilist);
5525 gimple_stmt_iterator gsi
5526 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5527 gassign *g
5528 = gimple_build_assign (unshare_expr (lvar), iv);
5529 gsi_insert_before_without_update (&gsi, g,
5530 GSI_SAME_STMT);
5531 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5532 enum tree_code code = PLUS_EXPR;
5533 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5534 code = POINTER_PLUS_EXPR;
5535 g = gimple_build_assign (iv, code, iv, t);
5536 gsi_insert_before_without_update (&gsi, g,
5537 GSI_SAME_STMT);
5538 break;
5540 x = lang_hooks.decls.omp_clause_copy_ctor
5541 (c, unshare_expr (ivar), x);
5542 gimplify_and_add (x, &llist[0]);
5543 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5544 if (x)
5545 gimplify_and_add (x, &llist[1]);
5546 break;
5548 if (omp_is_reference (var))
5550 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5551 tree new_vard = TREE_OPERAND (new_var, 0);
5552 gcc_assert (DECL_P (new_vard));
5553 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5554 nx = TYPE_SIZE_UNIT (type);
5555 if (TREE_CONSTANT (nx))
5557 nx = create_tmp_var_raw (type, get_name (var));
5558 gimple_add_tmp_var (nx);
5559 TREE_ADDRESSABLE (nx) = 1;
5560 nx = build_fold_addr_expr_loc (clause_loc, nx);
5561 nx = fold_convert_loc (clause_loc,
5562 TREE_TYPE (new_vard), nx);
5563 gimplify_assign (new_vard, nx, ilist);
5567 x = lang_hooks.decls.omp_clause_copy_ctor
5568 (c, unshare_expr (new_var), x);
5569 gimplify_and_add (x, ilist);
5570 goto do_dtor;
5572 case OMP_CLAUSE__LOOPTEMP_:
5573 case OMP_CLAUSE__REDUCTEMP_:
5574 gcc_assert (is_taskreg_ctx (ctx));
5575 x = build_outer_var_ref (var, ctx);
5576 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5577 gimplify_and_add (x, ilist);
5578 break;
5580 case OMP_CLAUSE_COPYIN:
5581 by_ref = use_pointer_for_field (var, NULL);
5582 x = build_receiver_ref (var, by_ref, ctx);
5583 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5584 append_to_statement_list (x, &copyin_seq);
5585 copyin_by_ref |= by_ref;
5586 break;
5588 case OMP_CLAUSE_REDUCTION:
5589 case OMP_CLAUSE_IN_REDUCTION:
5590 /* OpenACC reductions are initialized using the
5591 GOACC_REDUCTION internal function. */
5592 if (is_gimple_omp_oacc (ctx->stmt))
5593 break;
5594 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5596 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5597 gimple *tseq;
5598 tree ptype = TREE_TYPE (placeholder);
5599 if (cond)
5601 x = error_mark_node;
5602 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5603 && !task_reduction_needs_orig_p)
5604 x = var;
5605 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5607 tree pptype = build_pointer_type (ptype);
5608 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5609 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5610 size_int (task_reduction_cnt_full
5611 + task_reduction_cntorig - 1),
5612 NULL_TREE, NULL_TREE);
5613 else
5615 unsigned int idx
5616 = *ctx->task_reduction_map->get (c);
5617 x = task_reduction_read (ilist, tskred_temp,
5618 pptype, 7 + 3 * idx);
5620 x = fold_convert (pptype, x);
5621 x = build_simple_mem_ref (x);
5624 else
5626 x = build_outer_var_ref (var, ctx);
5628 if (omp_is_reference (var)
5629 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5630 x = build_fold_addr_expr_loc (clause_loc, x);
5632 SET_DECL_VALUE_EXPR (placeholder, x);
5633 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5634 tree new_vard = new_var;
5635 if (omp_is_reference (var))
5637 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5638 new_vard = TREE_OPERAND (new_var, 0);
5639 gcc_assert (DECL_P (new_vard));
5641 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5642 if (is_simd
5643 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5644 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5645 rvarp = &rvar;
5646 if (is_simd
5647 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5648 ivar, lvar, rvarp,
5649 &rvar2))
5651 if (new_vard == new_var)
5653 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5654 SET_DECL_VALUE_EXPR (new_var, ivar);
5656 else
5658 SET_DECL_VALUE_EXPR (new_vard,
5659 build_fold_addr_expr (ivar));
5660 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5662 x = lang_hooks.decls.omp_clause_default_ctor
5663 (c, unshare_expr (ivar),
5664 build_outer_var_ref (var, ctx));
5665 if (rvarp && ctx->for_simd_scan_phase)
5667 if (x)
5668 gimplify_and_add (x, &llist[0]);
5669 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5670 if (x)
5671 gimplify_and_add (x, &llist[1]);
5672 break;
5674 else if (rvarp)
5676 if (x)
5678 gimplify_and_add (x, &llist[0]);
5680 tree ivar2 = unshare_expr (lvar);
5681 TREE_OPERAND (ivar2, 1) = sctx.idx;
5682 x = lang_hooks.decls.omp_clause_default_ctor
5683 (c, ivar2, build_outer_var_ref (var, ctx));
5684 gimplify_and_add (x, &llist[0]);
5686 if (rvar2)
5688 x = lang_hooks.decls.omp_clause_default_ctor
5689 (c, unshare_expr (rvar2),
5690 build_outer_var_ref (var, ctx));
5691 gimplify_and_add (x, &llist[0]);
5694 /* For types that need construction, add another
5695 private var which will be default constructed
5696 and optionally initialized with
5697 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5698 loop we want to assign this value instead of
5699 constructing and destructing it in each
5700 iteration. */
5701 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5702 gimple_add_tmp_var (nv);
5703 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5704 ? rvar2
5705 : ivar, 0),
5706 nv);
5707 x = lang_hooks.decls.omp_clause_default_ctor
5708 (c, nv, build_outer_var_ref (var, ctx));
5709 gimplify_and_add (x, ilist);
5711 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5713 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5714 x = DECL_VALUE_EXPR (new_vard);
5715 tree vexpr = nv;
5716 if (new_vard != new_var)
5717 vexpr = build_fold_addr_expr (nv);
5718 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5719 lower_omp (&tseq, ctx);
5720 SET_DECL_VALUE_EXPR (new_vard, x);
5721 gimple_seq_add_seq (ilist, tseq);
5722 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5725 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5726 if (x)
5727 gimplify_and_add (x, dlist);
5730 tree ref = build_outer_var_ref (var, ctx);
5731 x = unshare_expr (ivar);
5732 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5733 ref);
5734 gimplify_and_add (x, &llist[0]);
5736 ref = build_outer_var_ref (var, ctx);
5737 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5738 rvar);
5739 gimplify_and_add (x, &llist[3]);
5741 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5742 if (new_vard == new_var)
5743 SET_DECL_VALUE_EXPR (new_var, lvar);
5744 else
5745 SET_DECL_VALUE_EXPR (new_vard,
5746 build_fold_addr_expr (lvar));
5748 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5749 if (x)
5750 gimplify_and_add (x, &llist[1]);
5752 tree ivar2 = unshare_expr (lvar);
5753 TREE_OPERAND (ivar2, 1) = sctx.idx;
5754 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5755 if (x)
5756 gimplify_and_add (x, &llist[1]);
5758 if (rvar2)
5760 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5761 if (x)
5762 gimplify_and_add (x, &llist[1]);
5764 break;
5766 if (x)
5767 gimplify_and_add (x, &llist[0]);
5768 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5770 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5771 lower_omp (&tseq, ctx);
5772 gimple_seq_add_seq (&llist[0], tseq);
5774 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5775 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5776 lower_omp (&tseq, ctx);
5777 gimple_seq_add_seq (&llist[1], tseq);
5778 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5779 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5780 if (new_vard == new_var)
5781 SET_DECL_VALUE_EXPR (new_var, lvar);
5782 else
5783 SET_DECL_VALUE_EXPR (new_vard,
5784 build_fold_addr_expr (lvar));
5785 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5786 if (x)
5787 gimplify_and_add (x, &llist[1]);
5788 break;
5790 /* If this is a reference to constant size reduction var
5791 with placeholder, we haven't emitted the initializer
5792 for it because it is undesirable if SIMD arrays are used.
5793 But if they aren't used, we need to emit the deferred
5794 initialization now. */
5795 else if (omp_is_reference (var) && is_simd)
5796 handle_simd_reference (clause_loc, new_vard, ilist);
5798 tree lab2 = NULL_TREE;
5799 if (cond)
5801 gimple *g;
5802 if (!is_parallel_ctx (ctx))
5804 tree condv = create_tmp_var (boolean_type_node);
5805 tree m = build_simple_mem_ref (cond);
5806 g = gimple_build_assign (condv, m);
5807 gimple_seq_add_stmt (ilist, g);
5808 tree lab1
5809 = create_artificial_label (UNKNOWN_LOCATION);
5810 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5811 g = gimple_build_cond (NE_EXPR, condv,
5812 boolean_false_node,
5813 lab2, lab1);
5814 gimple_seq_add_stmt (ilist, g);
5815 gimple_seq_add_stmt (ilist,
5816 gimple_build_label (lab1));
5818 g = gimple_build_assign (build_simple_mem_ref (cond),
5819 boolean_true_node);
5820 gimple_seq_add_stmt (ilist, g);
5822 x = lang_hooks.decls.omp_clause_default_ctor
5823 (c, unshare_expr (new_var),
5824 cond ? NULL_TREE
5825 : build_outer_var_ref (var, ctx));
5826 if (x)
5827 gimplify_and_add (x, ilist);
5829 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5830 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5832 if (ctx->for_simd_scan_phase)
5833 goto do_dtor;
5834 if (x || (!is_simd
5835 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
5837 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5838 gimple_add_tmp_var (nv);
5839 ctx->cb.decl_map->put (new_vard, nv);
5840 x = lang_hooks.decls.omp_clause_default_ctor
5841 (c, nv, build_outer_var_ref (var, ctx));
5842 if (x)
5843 gimplify_and_add (x, ilist);
5844 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5846 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5847 tree vexpr = nv;
5848 if (new_vard != new_var)
5849 vexpr = build_fold_addr_expr (nv);
5850 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5851 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5852 lower_omp (&tseq, ctx);
5853 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5854 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5855 gimple_seq_add_seq (ilist, tseq);
5857 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5858 if (is_simd && ctx->scan_exclusive)
5860 tree nv2
5861 = create_tmp_var_raw (TREE_TYPE (new_var));
5862 gimple_add_tmp_var (nv2);
5863 ctx->cb.decl_map->put (nv, nv2);
5864 x = lang_hooks.decls.omp_clause_default_ctor
5865 (c, nv2, build_outer_var_ref (var, ctx));
5866 gimplify_and_add (x, ilist);
5867 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5868 if (x)
5869 gimplify_and_add (x, dlist);
5871 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5872 if (x)
5873 gimplify_and_add (x, dlist);
5875 else if (is_simd
5876 && ctx->scan_exclusive
5877 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5879 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5880 gimple_add_tmp_var (nv2);
5881 ctx->cb.decl_map->put (new_vard, nv2);
5882 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5883 if (x)
5884 gimplify_and_add (x, dlist);
5886 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5887 goto do_dtor;
5890 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5892 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5893 lower_omp (&tseq, ctx);
5894 gimple_seq_add_seq (ilist, tseq);
5896 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5897 if (is_simd)
5899 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5900 lower_omp (&tseq, ctx);
5901 gimple_seq_add_seq (dlist, tseq);
5902 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5904 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5905 if (cond)
5907 if (lab2)
5908 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5909 break;
5911 goto do_dtor;
5913 else
5915 x = omp_reduction_init (c, TREE_TYPE (new_var));
5916 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5917 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5919 if (cond)
5921 gimple *g;
5922 tree lab2 = NULL_TREE;
5923 /* GOMP_taskgroup_reduction_register memsets the whole
5924 array to zero. If the initializer is zero, we don't
5925 need to initialize it again, just mark it as ever
5926 used unconditionally, i.e. cond = true. */
5927 if (initializer_zerop (x))
5929 g = gimple_build_assign (build_simple_mem_ref (cond),
5930 boolean_true_node);
5931 gimple_seq_add_stmt (ilist, g);
5932 break;
5935 /* Otherwise, emit
5936 if (!cond) { cond = true; new_var = x; } */
5937 if (!is_parallel_ctx (ctx))
5939 tree condv = create_tmp_var (boolean_type_node);
5940 tree m = build_simple_mem_ref (cond);
5941 g = gimple_build_assign (condv, m);
5942 gimple_seq_add_stmt (ilist, g);
5943 tree lab1
5944 = create_artificial_label (UNKNOWN_LOCATION);
5945 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5946 g = gimple_build_cond (NE_EXPR, condv,
5947 boolean_false_node,
5948 lab2, lab1);
5949 gimple_seq_add_stmt (ilist, g);
5950 gimple_seq_add_stmt (ilist,
5951 gimple_build_label (lab1));
5953 g = gimple_build_assign (build_simple_mem_ref (cond),
5954 boolean_true_node);
5955 gimple_seq_add_stmt (ilist, g);
5956 gimplify_assign (new_var, x, ilist);
5957 if (lab2)
5958 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5959 break;
5962 /* reduction(-:var) sums up the partial results, so it
5963 acts identically to reduction(+:var). */
5964 if (code == MINUS_EXPR)
5965 code = PLUS_EXPR;
5967 tree new_vard = new_var;
5968 if (is_simd && omp_is_reference (var))
5970 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5971 new_vard = TREE_OPERAND (new_var, 0);
5972 gcc_assert (DECL_P (new_vard));
5974 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5975 if (is_simd
5976 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5977 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5978 rvarp = &rvar;
5979 if (is_simd
5980 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5981 ivar, lvar, rvarp,
5982 &rvar2))
5984 if (new_vard != new_var)
5986 SET_DECL_VALUE_EXPR (new_vard,
5987 build_fold_addr_expr (lvar));
5988 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5991 tree ref = build_outer_var_ref (var, ctx);
5993 if (rvarp)
5995 if (ctx->for_simd_scan_phase)
5996 break;
5997 gimplify_assign (ivar, ref, &llist[0]);
5998 ref = build_outer_var_ref (var, ctx);
5999 gimplify_assign (ref, rvar, &llist[3]);
6000 break;
6003 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6005 if (sctx.is_simt)
6007 if (!simt_lane)
6008 simt_lane = create_tmp_var (unsigned_type_node);
6009 x = build_call_expr_internal_loc
6010 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6011 TREE_TYPE (ivar), 2, ivar, simt_lane);
6012 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6013 gimplify_assign (ivar, x, &llist[2]);
6015 x = build2 (code, TREE_TYPE (ref), ref, ivar);
6016 ref = build_outer_var_ref (var, ctx);
6017 gimplify_assign (ref, x, &llist[1]);
6020 else
6022 if (omp_is_reference (var) && is_simd)
6023 handle_simd_reference (clause_loc, new_vard, ilist);
6024 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6025 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6026 break;
6027 gimplify_assign (new_var, x, ilist);
6028 if (is_simd)
6030 tree ref = build_outer_var_ref (var, ctx);
6032 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6033 ref = build_outer_var_ref (var, ctx);
6034 gimplify_assign (ref, x, dlist);
6038 break;
6040 default:
6041 gcc_unreachable ();
6045 if (tskred_avar)
6047 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6048 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6051 if (known_eq (sctx.max_vf, 1U))
6053 sctx.is_simt = false;
6054 if (ctx->lastprivate_conditional_map)
6056 if (gimple_omp_for_combined_into_p (ctx->stmt))
6058 /* Signal to lower_omp_1 that it should use parent context. */
6059 ctx->combined_into_simd_safelen1 = true;
6060 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6061 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6062 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6064 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6065 omp_context *outer = ctx->outer;
6066 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6067 outer = outer->outer;
6068 tree *v = ctx->lastprivate_conditional_map->get (o);
6069 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6070 tree *pv = outer->lastprivate_conditional_map->get (po);
6071 *v = *pv;
6074 else
6076 /* When not vectorized, treat lastprivate(conditional:) like
6077 normal lastprivate, as there will be just one simd lane
6078 writing the privatized variable. */
6079 delete ctx->lastprivate_conditional_map;
6080 ctx->lastprivate_conditional_map = NULL;
6085 if (nonconst_simd_if)
6087 if (sctx.lane == NULL_TREE)
6089 sctx.idx = create_tmp_var (unsigned_type_node);
6090 sctx.lane = create_tmp_var (unsigned_type_node);
6092 /* FIXME: For now. */
6093 sctx.is_simt = false;
6096 if (sctx.lane || sctx.is_simt)
6098 uid = create_tmp_var (ptr_type_node, "simduid");
6099 /* Don't want uninit warnings on simduid, it is always uninitialized,
6100 but we use it not for the value, but for the DECL_UID only. */
6101 TREE_NO_WARNING (uid) = 1;
6102 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6103 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6104 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6105 gimple_omp_for_set_clauses (ctx->stmt, c);
6107 /* Emit calls denoting privatized variables and initializing a pointer to
6108 structure that holds private variables as fields after ompdevlow pass. */
6109 if (sctx.is_simt)
6111 sctx.simt_eargs[0] = uid;
6112 gimple *g
6113 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6114 gimple_call_set_lhs (g, uid);
6115 gimple_seq_add_stmt (ilist, g);
6116 sctx.simt_eargs.release ();
6118 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6119 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6120 gimple_call_set_lhs (g, simtrec);
6121 gimple_seq_add_stmt (ilist, g);
6123 if (sctx.lane)
6125 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6126 2 + (nonconst_simd_if != NULL),
6127 uid, integer_zero_node,
6128 nonconst_simd_if);
6129 gimple_call_set_lhs (g, sctx.lane);
6130 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6131 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6132 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6133 build_int_cst (unsigned_type_node, 0));
6134 gimple_seq_add_stmt (ilist, g);
6135 if (sctx.lastlane)
6137 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6138 2, uid, sctx.lane);
6139 gimple_call_set_lhs (g, sctx.lastlane);
6140 gimple_seq_add_stmt (dlist, g);
6141 gimple_seq_add_seq (dlist, llist[3]);
6143 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6144 if (llist[2])
6146 tree simt_vf = create_tmp_var (unsigned_type_node);
6147 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6148 gimple_call_set_lhs (g, simt_vf);
6149 gimple_seq_add_stmt (dlist, g);
6151 tree t = build_int_cst (unsigned_type_node, 1);
6152 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6153 gimple_seq_add_stmt (dlist, g);
6155 t = build_int_cst (unsigned_type_node, 0);
6156 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6157 gimple_seq_add_stmt (dlist, g);
6159 tree body = create_artificial_label (UNKNOWN_LOCATION);
6160 tree header = create_artificial_label (UNKNOWN_LOCATION);
6161 tree end = create_artificial_label (UNKNOWN_LOCATION);
6162 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6163 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6165 gimple_seq_add_seq (dlist, llist[2]);
6167 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6168 gimple_seq_add_stmt (dlist, g);
6170 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6171 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6172 gimple_seq_add_stmt (dlist, g);
6174 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6176 for (int i = 0; i < 2; i++)
6177 if (llist[i])
6179 tree vf = create_tmp_var (unsigned_type_node);
6180 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6181 gimple_call_set_lhs (g, vf);
6182 gimple_seq *seq = i == 0 ? ilist : dlist;
6183 gimple_seq_add_stmt (seq, g);
6184 tree t = build_int_cst (unsigned_type_node, 0);
6185 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6186 gimple_seq_add_stmt (seq, g);
6187 tree body = create_artificial_label (UNKNOWN_LOCATION);
6188 tree header = create_artificial_label (UNKNOWN_LOCATION);
6189 tree end = create_artificial_label (UNKNOWN_LOCATION);
6190 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6191 gimple_seq_add_stmt (seq, gimple_build_label (body));
6192 gimple_seq_add_seq (seq, llist[i]);
6193 t = build_int_cst (unsigned_type_node, 1);
6194 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6195 gimple_seq_add_stmt (seq, g);
6196 gimple_seq_add_stmt (seq, gimple_build_label (header));
6197 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6198 gimple_seq_add_stmt (seq, g);
6199 gimple_seq_add_stmt (seq, gimple_build_label (end));
6202 if (sctx.is_simt)
6204 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6205 gimple *g
6206 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6207 gimple_seq_add_stmt (dlist, g);
6210 /* The copyin sequence is not to be executed by the main thread, since
6211 that would result in self-copies. Perhaps not visible to scalars,
6212 but it certainly is to C++ operator=. */
6213 if (copyin_seq)
6215 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6217 x = build2 (NE_EXPR, boolean_type_node, x,
6218 build_int_cst (TREE_TYPE (x), 0));
6219 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6220 gimplify_and_add (x, ilist);
6223 /* If any copyin variable is passed by reference, we must ensure the
6224 master thread doesn't modify it before it is copied over in all
6225 threads. Similarly for variables in both firstprivate and
6226 lastprivate clauses we need to ensure the lastprivate copying
6227 happens after firstprivate copying in all threads. And similarly
6228 for UDRs if initializer expression refers to omp_orig. */
6229 if (copyin_by_ref || lastprivate_firstprivate
6230 || (reduction_omp_orig_ref
6231 && !ctx->scan_inclusive
6232 && !ctx->scan_exclusive))
6234 /* Don't add any barrier for #pragma omp simd or
6235 #pragma omp distribute. */
6236 if (!is_task_ctx (ctx)
6237 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6238 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6239 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6242 /* If max_vf is non-zero, then we can use only a vectorization factor
6243 up to the max_vf we chose. So stick it into the safelen clause. */
6244 if (maybe_ne (sctx.max_vf, 0U))
6246 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6247 OMP_CLAUSE_SAFELEN);
6248 poly_uint64 safe_len;
6249 if (c == NULL_TREE
6250 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6251 && maybe_gt (safe_len, sctx.max_vf)))
6253 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6254 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6255 sctx.max_vf);
6256 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6257 gimple_omp_for_set_clauses (ctx->stmt, c);
6262 /* Create temporary variables for lastprivate(conditional:) implementation
6263 in context CTX with CLAUSES. */
6265 static void
6266 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6268 tree iter_type = NULL_TREE;
6269 tree cond_ptr = NULL_TREE;
6270 tree iter_var = NULL_TREE;
6271 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6272 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6273 tree next = *clauses;
6274 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6275 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6276 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6278 if (is_simd)
6280 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6281 gcc_assert (cc);
6282 if (iter_type == NULL_TREE)
6284 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6285 iter_var = create_tmp_var_raw (iter_type);
6286 DECL_CONTEXT (iter_var) = current_function_decl;
6287 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6288 DECL_CHAIN (iter_var) = ctx->block_vars;
6289 ctx->block_vars = iter_var;
6290 tree c3
6291 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6292 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6293 OMP_CLAUSE_DECL (c3) = iter_var;
6294 OMP_CLAUSE_CHAIN (c3) = *clauses;
6295 *clauses = c3;
6296 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6298 next = OMP_CLAUSE_CHAIN (cc);
6299 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6300 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6301 ctx->lastprivate_conditional_map->put (o, v);
6302 continue;
6304 if (iter_type == NULL)
6306 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6308 struct omp_for_data fd;
6309 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6310 NULL);
6311 iter_type = unsigned_type_for (fd.iter_type);
6313 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6314 iter_type = unsigned_type_node;
6315 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6316 if (c2)
6318 cond_ptr
6319 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6320 OMP_CLAUSE_DECL (c2) = cond_ptr;
6322 else
6324 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6325 DECL_CONTEXT (cond_ptr) = current_function_decl;
6326 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6327 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6328 ctx->block_vars = cond_ptr;
6329 c2 = build_omp_clause (UNKNOWN_LOCATION,
6330 OMP_CLAUSE__CONDTEMP_);
6331 OMP_CLAUSE_DECL (c2) = cond_ptr;
6332 OMP_CLAUSE_CHAIN (c2) = *clauses;
6333 *clauses = c2;
6335 iter_var = create_tmp_var_raw (iter_type);
6336 DECL_CONTEXT (iter_var) = current_function_decl;
6337 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6338 DECL_CHAIN (iter_var) = ctx->block_vars;
6339 ctx->block_vars = iter_var;
6340 tree c3
6341 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6342 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6343 OMP_CLAUSE_DECL (c3) = iter_var;
6344 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6345 OMP_CLAUSE_CHAIN (c2) = c3;
6346 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6348 tree v = create_tmp_var_raw (iter_type);
6349 DECL_CONTEXT (v) = current_function_decl;
6350 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6351 DECL_CHAIN (v) = ctx->block_vars;
6352 ctx->block_vars = v;
6353 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6354 ctx->lastprivate_conditional_map->put (o, v);
6359 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6360 both parallel and workshare constructs. PREDICATE may be NULL if it's
6361 always true. BODY_P is the sequence to insert early initialization
6362 if needed, STMT_LIST is where the non-conditional lastprivate handling
6363 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
6364 section. */
6366 static void
6367 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6368 gimple_seq *stmt_list, gimple_seq *cstmt_list,
6369 omp_context *ctx)
6371 tree x, c, label = NULL, orig_clauses = clauses;
6372 bool par_clauses = false;
6373 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
6374 unsigned HOST_WIDE_INT conditional_off = 0;
6375 gimple_seq post_stmt_list = NULL;
6377 /* Early exit if there are no lastprivate or linear clauses. */
6378 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6379 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6380 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6381 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6382 break;
6383 if (clauses == NULL)
6385 /* If this was a workshare clause, see if it had been combined
6386 with its parallel. In that case, look for the clauses on the
6387 parallel statement itself. */
6388 if (is_parallel_ctx (ctx))
6389 return;
6391 ctx = ctx->outer;
6392 if (ctx == NULL || !is_parallel_ctx (ctx))
6393 return;
6395 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6396 OMP_CLAUSE_LASTPRIVATE);
6397 if (clauses == NULL)
6398 return;
6399 par_clauses = true;
6402 bool maybe_simt = false;
6403 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6404 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6406 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6407 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6408 if (simduid)
6409 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6412 if (predicate)
6414 gcond *stmt;
6415 tree label_true, arm1, arm2;
6416 enum tree_code pred_code = TREE_CODE (predicate);
6418 label = create_artificial_label (UNKNOWN_LOCATION);
6419 label_true = create_artificial_label (UNKNOWN_LOCATION);
6420 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6422 arm1 = TREE_OPERAND (predicate, 0);
6423 arm2 = TREE_OPERAND (predicate, 1);
6424 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6425 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6427 else
6429 arm1 = predicate;
6430 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6431 arm2 = boolean_false_node;
6432 pred_code = NE_EXPR;
6434 if (maybe_simt)
6436 c = build2 (pred_code, boolean_type_node, arm1, arm2);
6437 c = fold_convert (integer_type_node, c);
6438 simtcond = create_tmp_var (integer_type_node);
6439 gimplify_assign (simtcond, c, stmt_list);
6440 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6441 1, simtcond);
6442 c = create_tmp_var (integer_type_node);
6443 gimple_call_set_lhs (g, c);
6444 gimple_seq_add_stmt (stmt_list, g);
6445 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6446 label_true, label);
6448 else
6449 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6450 gimple_seq_add_stmt (stmt_list, stmt);
6451 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6454 tree cond_ptr = NULL_TREE;
6455 for (c = clauses; c ;)
6457 tree var, new_var;
6458 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6459 gimple_seq *this_stmt_list = stmt_list;
6460 tree lab2 = NULL_TREE;
6462 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6463 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6464 && ctx->lastprivate_conditional_map
6465 && !ctx->combined_into_simd_safelen1)
6467 gcc_assert (body_p);
6468 if (simduid)
6469 goto next;
6470 if (cond_ptr == NULL_TREE)
6472 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6473 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6475 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6476 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6477 tree v = *ctx->lastprivate_conditional_map->get (o);
6478 gimplify_assign (v, build_zero_cst (type), body_p);
6479 this_stmt_list = cstmt_list;
6480 tree mem;
6481 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6483 mem = build2 (MEM_REF, type, cond_ptr,
6484 build_int_cst (TREE_TYPE (cond_ptr),
6485 conditional_off));
6486 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6488 else
6489 mem = build4 (ARRAY_REF, type, cond_ptr,
6490 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6491 tree mem2 = copy_node (mem);
6492 gimple_seq seq = NULL;
6493 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6494 gimple_seq_add_seq (this_stmt_list, seq);
6495 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6496 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6497 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6498 gimple_seq_add_stmt (this_stmt_list, g);
6499 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6500 gimplify_assign (mem2, v, this_stmt_list);
6502 else if (predicate
6503 && ctx->combined_into_simd_safelen1
6504 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6505 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6506 && ctx->lastprivate_conditional_map)
6507 this_stmt_list = &post_stmt_list;
6509 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6510 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6511 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6513 var = OMP_CLAUSE_DECL (c);
6514 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6515 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6516 && is_taskloop_ctx (ctx))
6518 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6519 new_var = lookup_decl (var, ctx->outer);
6521 else
6523 new_var = lookup_decl (var, ctx);
6524 /* Avoid uninitialized warnings for lastprivate and
6525 for linear iterators. */
6526 if (predicate
6527 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6528 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6529 TREE_NO_WARNING (new_var) = 1;
6532 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6534 tree val = DECL_VALUE_EXPR (new_var);
6535 if (TREE_CODE (val) == ARRAY_REF
6536 && VAR_P (TREE_OPERAND (val, 0))
6537 && lookup_attribute ("omp simd array",
6538 DECL_ATTRIBUTES (TREE_OPERAND (val,
6539 0))))
6541 if (lastlane == NULL)
6543 lastlane = create_tmp_var (unsigned_type_node);
6544 gcall *g
6545 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6546 2, simduid,
6547 TREE_OPERAND (val, 1));
6548 gimple_call_set_lhs (g, lastlane);
6549 gimple_seq_add_stmt (this_stmt_list, g);
6551 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6552 TREE_OPERAND (val, 0), lastlane,
6553 NULL_TREE, NULL_TREE);
6554 TREE_THIS_NOTRAP (new_var) = 1;
6557 else if (maybe_simt)
6559 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6560 ? DECL_VALUE_EXPR (new_var)
6561 : new_var);
6562 if (simtlast == NULL)
6564 simtlast = create_tmp_var (unsigned_type_node);
6565 gcall *g = gimple_build_call_internal
6566 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6567 gimple_call_set_lhs (g, simtlast);
6568 gimple_seq_add_stmt (this_stmt_list, g);
6570 x = build_call_expr_internal_loc
6571 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6572 TREE_TYPE (val), 2, val, simtlast);
6573 new_var = unshare_expr (new_var);
6574 gimplify_assign (new_var, x, this_stmt_list);
6575 new_var = unshare_expr (new_var);
6578 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6579 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6581 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6582 gimple_seq_add_seq (this_stmt_list,
6583 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6584 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6586 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6587 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6589 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6590 gimple_seq_add_seq (this_stmt_list,
6591 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6592 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6595 x = NULL_TREE;
6596 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6597 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
6598 && is_taskloop_ctx (ctx))
6600 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6601 ctx->outer->outer);
6602 if (is_global_var (ovar))
6603 x = ovar;
6605 if (!x)
6606 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6607 if (omp_is_reference (var))
6608 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6609 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6610 gimplify_and_add (x, this_stmt_list);
6612 if (lab2)
6613 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6616 next:
6617 c = OMP_CLAUSE_CHAIN (c);
6618 if (c == NULL && !par_clauses)
6620 /* If this was a workshare clause, see if it had been combined
6621 with its parallel. In that case, continue looking for the
6622 clauses also on the parallel statement itself. */
6623 if (is_parallel_ctx (ctx))
6624 break;
6626 ctx = ctx->outer;
6627 if (ctx == NULL || !is_parallel_ctx (ctx))
6628 break;
6630 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6631 OMP_CLAUSE_LASTPRIVATE);
6632 par_clauses = true;
6636 if (label)
6637 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
6638 gimple_seq_add_seq (stmt_list, post_stmt_list);
6641 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6642 (which might be a placeholder). INNER is true if this is an inner
6643 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6644 join markers. Generate the before-loop forking sequence in
6645 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6646 general form of these sequences is
6648 GOACC_REDUCTION_SETUP
6649 GOACC_FORK
6650 GOACC_REDUCTION_INIT
6652 GOACC_REDUCTION_FINI
6653 GOACC_JOIN
6654 GOACC_REDUCTION_TEARDOWN. */
6656 static void
6657 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6658 gcall *fork, gcall *join, gimple_seq *fork_seq,
6659 gimple_seq *join_seq, omp_context *ctx)
6661 gimple_seq before_fork = NULL;
6662 gimple_seq after_fork = NULL;
6663 gimple_seq before_join = NULL;
6664 gimple_seq after_join = NULL;
6665 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6666 setup_code = NULL_TREE, teardown_code = NULL_TREE;
6667 unsigned offset = 0;
6669 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6670 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6672 tree orig = OMP_CLAUSE_DECL (c);
6673 tree var = maybe_lookup_decl (orig, ctx);
6674 tree ref_to_res = NULL_TREE;
6675 tree incoming, outgoing, v1, v2, v3;
6676 bool is_private = false;
6678 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6679 if (rcode == MINUS_EXPR)
6680 rcode = PLUS_EXPR;
6681 else if (rcode == TRUTH_ANDIF_EXPR)
6682 rcode = BIT_AND_EXPR;
6683 else if (rcode == TRUTH_ORIF_EXPR)
6684 rcode = BIT_IOR_EXPR;
6685 tree op = build_int_cst (unsigned_type_node, rcode);
6687 if (!var)
6688 var = orig;
6690 incoming = outgoing = var;
6692 if (!inner)
6694 /* See if an outer construct also reduces this variable. */
6695 omp_context *outer = ctx;
6697 while (omp_context *probe = outer->outer)
6699 enum gimple_code type = gimple_code (probe->stmt);
6700 tree cls;
6702 switch (type)
6704 case GIMPLE_OMP_FOR:
6705 cls = gimple_omp_for_clauses (probe->stmt);
6706 break;
6708 case GIMPLE_OMP_TARGET:
6709 if ((gimple_omp_target_kind (probe->stmt)
6710 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6711 && (gimple_omp_target_kind (probe->stmt)
6712 != GF_OMP_TARGET_KIND_OACC_SERIAL))
6713 goto do_lookup;
6715 cls = gimple_omp_target_clauses (probe->stmt);
6716 break;
6718 default:
6719 goto do_lookup;
6722 outer = probe;
6723 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6724 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6725 && orig == OMP_CLAUSE_DECL (cls))
6727 incoming = outgoing = lookup_decl (orig, probe);
6728 goto has_outer_reduction;
6730 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6731 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6732 && orig == OMP_CLAUSE_DECL (cls))
6734 is_private = true;
6735 goto do_lookup;
6739 do_lookup:
6740 /* This is the outermost construct with this reduction,
6741 see if there's a mapping for it. */
6742 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6743 && maybe_lookup_field (orig, outer) && !is_private)
6745 ref_to_res = build_receiver_ref (orig, false, outer);
6746 if (omp_is_reference (orig))
6747 ref_to_res = build_simple_mem_ref (ref_to_res);
6749 tree type = TREE_TYPE (var);
6750 if (POINTER_TYPE_P (type))
6751 type = TREE_TYPE (type);
6753 outgoing = var;
6754 incoming = omp_reduction_init_op (loc, rcode, type);
6756 else
6758 /* Try to look at enclosing contexts for reduction var,
6759 use original if no mapping found. */
6760 tree t = NULL_TREE;
6761 omp_context *c = ctx->outer;
6762 while (c && !t)
6764 t = maybe_lookup_decl (orig, c);
6765 c = c->outer;
6767 incoming = outgoing = (t ? t : orig);
6770 has_outer_reduction:;
6773 if (!ref_to_res)
6774 ref_to_res = integer_zero_node;
6776 if (omp_is_reference (orig))
6778 tree type = TREE_TYPE (var);
6779 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6781 if (!inner)
6783 tree x = create_tmp_var (TREE_TYPE (type), id);
6784 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6787 v1 = create_tmp_var (type, id);
6788 v2 = create_tmp_var (type, id);
6789 v3 = create_tmp_var (type, id);
6791 gimplify_assign (v1, var, fork_seq);
6792 gimplify_assign (v2, var, fork_seq);
6793 gimplify_assign (v3, var, fork_seq);
6795 var = build_simple_mem_ref (var);
6796 v1 = build_simple_mem_ref (v1);
6797 v2 = build_simple_mem_ref (v2);
6798 v3 = build_simple_mem_ref (v3);
6799 outgoing = build_simple_mem_ref (outgoing);
6801 if (!TREE_CONSTANT (incoming))
6802 incoming = build_simple_mem_ref (incoming);
6804 else
6805 v1 = v2 = v3 = var;
6807 /* Determine position in reduction buffer, which may be used
6808 by target. The parser has ensured that this is not a
6809 variable-sized type. */
6810 fixed_size_mode mode
6811 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
6812 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6813 offset = (offset + align - 1) & ~(align - 1);
6814 tree off = build_int_cst (sizetype, offset);
6815 offset += GET_MODE_SIZE (mode);
6817 if (!init_code)
6819 init_code = build_int_cst (integer_type_node,
6820 IFN_GOACC_REDUCTION_INIT);
6821 fini_code = build_int_cst (integer_type_node,
6822 IFN_GOACC_REDUCTION_FINI);
6823 setup_code = build_int_cst (integer_type_node,
6824 IFN_GOACC_REDUCTION_SETUP);
6825 teardown_code = build_int_cst (integer_type_node,
6826 IFN_GOACC_REDUCTION_TEARDOWN);
6829 tree setup_call
6830 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6831 TREE_TYPE (var), 6, setup_code,
6832 unshare_expr (ref_to_res),
6833 incoming, level, op, off);
6834 tree init_call
6835 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6836 TREE_TYPE (var), 6, init_code,
6837 unshare_expr (ref_to_res),
6838 v1, level, op, off);
6839 tree fini_call
6840 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6841 TREE_TYPE (var), 6, fini_code,
6842 unshare_expr (ref_to_res),
6843 v2, level, op, off);
6844 tree teardown_call
6845 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6846 TREE_TYPE (var), 6, teardown_code,
6847 ref_to_res, v3, level, op, off);
6849 gimplify_assign (v1, setup_call, &before_fork);
6850 gimplify_assign (v2, init_call, &after_fork);
6851 gimplify_assign (v3, fini_call, &before_join);
6852 gimplify_assign (outgoing, teardown_call, &after_join);
6855 /* Now stitch things together. */
6856 gimple_seq_add_seq (fork_seq, before_fork);
6857 if (fork)
6858 gimple_seq_add_stmt (fork_seq, fork);
6859 gimple_seq_add_seq (fork_seq, after_fork);
6861 gimple_seq_add_seq (join_seq, before_join);
6862 if (join)
6863 gimple_seq_add_stmt (join_seq, join);
6864 gimple_seq_add_seq (join_seq, after_join);
6867 /* Generate code to implement the REDUCTION clauses, append it
6868 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
6869 that should be emitted also inside of the critical section,
6870 in that case clear *CLIST afterwards, otherwise leave it as is
6871 and let the caller emit it itself. */
6873 static void
6874 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
6875 gimple_seq *clist, omp_context *ctx)
6877 gimple_seq sub_seq = NULL;
6878 gimple *stmt;
6879 tree x, c;
6880 int count = 0;
6882 /* OpenACC loop reductions are handled elsewhere. */
6883 if (is_gimple_omp_oacc (ctx->stmt))
6884 return;
6886 /* SIMD reductions are handled in lower_rec_input_clauses. */
6887 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6888 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6889 return;
6891 /* inscan reductions are handled elsewhere. */
6892 if (ctx->scan_inclusive || ctx->scan_exclusive)
6893 return;
6895 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6896 update in that case, otherwise use a lock. */
6897 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
6898 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6899 && !OMP_CLAUSE_REDUCTION_TASK (c))
6901 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
6902 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6904 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6905 count = -1;
6906 break;
6908 count++;
6911 if (count == 0)
6912 return;
6914 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6916 tree var, ref, new_var, orig_var;
6917 enum tree_code code;
6918 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6920 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6921 || OMP_CLAUSE_REDUCTION_TASK (c))
6922 continue;
6924 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
6925 orig_var = var = OMP_CLAUSE_DECL (c);
6926 if (TREE_CODE (var) == MEM_REF)
6928 var = TREE_OPERAND (var, 0);
6929 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
6930 var = TREE_OPERAND (var, 0);
6931 if (TREE_CODE (var) == ADDR_EXPR)
6932 var = TREE_OPERAND (var, 0);
6933 else
6935 /* If this is a pointer or referenced based array
6936 section, the var could be private in the outer
6937 context e.g. on orphaned loop construct. Pretend this
6938 is private variable's outer reference. */
6939 ccode = OMP_CLAUSE_PRIVATE;
6940 if (TREE_CODE (var) == INDIRECT_REF)
6941 var = TREE_OPERAND (var, 0);
6943 orig_var = var;
6944 if (is_variable_sized (var))
6946 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
6947 var = DECL_VALUE_EXPR (var);
6948 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
6949 var = TREE_OPERAND (var, 0);
6950 gcc_assert (DECL_P (var));
6953 new_var = lookup_decl (var, ctx);
6954 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
6955 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6956 ref = build_outer_var_ref (var, ctx, ccode);
6957 code = OMP_CLAUSE_REDUCTION_CODE (c);
6959 /* reduction(-:var) sums up the partial results, so it acts
6960 identically to reduction(+:var). */
6961 if (code == MINUS_EXPR)
6962 code = PLUS_EXPR;
6964 if (count == 1)
6966 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
6968 addr = save_expr (addr);
6969 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
6970 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
6971 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
6972 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
6973 gimplify_and_add (x, stmt_seqp);
6974 return;
6976 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6978 tree d = OMP_CLAUSE_DECL (c);
6979 tree type = TREE_TYPE (d);
6980 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6981 tree i = create_tmp_var (TREE_TYPE (v));
6982 tree ptype = build_pointer_type (TREE_TYPE (type));
6983 tree bias = TREE_OPERAND (d, 1);
6984 d = TREE_OPERAND (d, 0);
6985 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
6987 tree b = TREE_OPERAND (d, 1);
6988 b = maybe_lookup_decl (b, ctx);
6989 if (b == NULL)
6991 b = TREE_OPERAND (d, 1);
6992 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
6994 if (integer_zerop (bias))
6995 bias = b;
6996 else
6998 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
6999 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7000 TREE_TYPE (b), b, bias);
7002 d = TREE_OPERAND (d, 0);
7004 /* For ref build_outer_var_ref already performs this, so
7005 only new_var needs a dereference. */
7006 if (TREE_CODE (d) == INDIRECT_REF)
7008 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7009 gcc_assert (omp_is_reference (var) && var == orig_var);
7011 else if (TREE_CODE (d) == ADDR_EXPR)
7013 if (orig_var == var)
7015 new_var = build_fold_addr_expr (new_var);
7016 ref = build_fold_addr_expr (ref);
7019 else
7021 gcc_assert (orig_var == var);
7022 if (omp_is_reference (var))
7023 ref = build_fold_addr_expr (ref);
7025 if (DECL_P (v))
7027 tree t = maybe_lookup_decl (v, ctx);
7028 if (t)
7029 v = t;
7030 else
7031 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7032 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7034 if (!integer_zerop (bias))
7036 bias = fold_convert_loc (clause_loc, sizetype, bias);
7037 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7038 TREE_TYPE (new_var), new_var,
7039 unshare_expr (bias));
7040 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7041 TREE_TYPE (ref), ref, bias);
7043 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7044 ref = fold_convert_loc (clause_loc, ptype, ref);
7045 tree m = create_tmp_var (ptype);
7046 gimplify_assign (m, new_var, stmt_seqp);
7047 new_var = m;
7048 m = create_tmp_var (ptype);
7049 gimplify_assign (m, ref, stmt_seqp);
7050 ref = m;
7051 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7052 tree body = create_artificial_label (UNKNOWN_LOCATION);
7053 tree end = create_artificial_label (UNKNOWN_LOCATION);
7054 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7055 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7056 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7057 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7059 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7060 tree decl_placeholder
7061 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7062 SET_DECL_VALUE_EXPR (placeholder, out);
7063 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7064 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7065 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7066 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7067 gimple_seq_add_seq (&sub_seq,
7068 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7069 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7070 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7071 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7073 else
7075 x = build2 (code, TREE_TYPE (out), out, priv);
7076 out = unshare_expr (out);
7077 gimplify_assign (out, x, &sub_seq);
7079 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7080 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7081 gimple_seq_add_stmt (&sub_seq, g);
7082 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7083 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7084 gimple_seq_add_stmt (&sub_seq, g);
7085 g = gimple_build_assign (i, PLUS_EXPR, i,
7086 build_int_cst (TREE_TYPE (i), 1));
7087 gimple_seq_add_stmt (&sub_seq, g);
7088 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7089 gimple_seq_add_stmt (&sub_seq, g);
7090 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7092 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7094 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7096 if (omp_is_reference (var)
7097 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7098 TREE_TYPE (ref)))
7099 ref = build_fold_addr_expr_loc (clause_loc, ref);
7100 SET_DECL_VALUE_EXPR (placeholder, ref);
7101 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7102 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7103 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7104 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7105 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7107 else
7109 x = build2 (code, TREE_TYPE (ref), ref, new_var);
7110 ref = build_outer_var_ref (var, ctx);
7111 gimplify_assign (ref, x, &sub_seq);
7115 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7117 gimple_seq_add_stmt (stmt_seqp, stmt);
7119 gimple_seq_add_seq (stmt_seqp, sub_seq);
7121 if (clist)
7123 gimple_seq_add_seq (stmt_seqp, *clist);
7124 *clist = NULL;
7127 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7129 gimple_seq_add_stmt (stmt_seqp, stmt);
7133 /* Generate code to implement the COPYPRIVATE clauses. */
7135 static void
7136 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7137 omp_context *ctx)
7139 tree c;
7141 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7143 tree var, new_var, ref, x;
7144 bool by_ref;
7145 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7147 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7148 continue;
7150 var = OMP_CLAUSE_DECL (c);
7151 by_ref = use_pointer_for_field (var, NULL);
7153 ref = build_sender_ref (var, ctx);
7154 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7155 if (by_ref)
7157 x = build_fold_addr_expr_loc (clause_loc, new_var);
7158 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7160 gimplify_assign (ref, x, slist);
7162 ref = build_receiver_ref (var, false, ctx);
7163 if (by_ref)
7165 ref = fold_convert_loc (clause_loc,
7166 build_pointer_type (TREE_TYPE (new_var)),
7167 ref);
7168 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7170 if (omp_is_reference (var))
7172 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7173 ref = build_simple_mem_ref_loc (clause_loc, ref);
7174 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7176 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7177 gimplify_and_add (x, rlist);
7182 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7183 and REDUCTION from the sender (aka parent) side. */
7185 static void
7186 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7187 omp_context *ctx)
7189 tree c, t;
7190 int ignored_looptemp = 0;
7191 bool is_taskloop = false;
7193 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7194 by GOMP_taskloop. */
7195 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7197 ignored_looptemp = 2;
7198 is_taskloop = true;
7201 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7203 tree val, ref, x, var;
7204 bool by_ref, do_in = false, do_out = false;
7205 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7207 switch (OMP_CLAUSE_CODE (c))
7209 case OMP_CLAUSE_PRIVATE:
7210 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7211 break;
7212 continue;
7213 case OMP_CLAUSE_FIRSTPRIVATE:
7214 case OMP_CLAUSE_COPYIN:
7215 case OMP_CLAUSE_LASTPRIVATE:
7216 case OMP_CLAUSE_IN_REDUCTION:
7217 case OMP_CLAUSE__REDUCTEMP_:
7218 break;
7219 case OMP_CLAUSE_REDUCTION:
7220 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7221 continue;
7222 break;
7223 case OMP_CLAUSE_SHARED:
7224 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7225 break;
7226 continue;
7227 case OMP_CLAUSE__LOOPTEMP_:
7228 if (ignored_looptemp)
7230 ignored_looptemp--;
7231 continue;
7233 break;
7234 default:
7235 continue;
7238 val = OMP_CLAUSE_DECL (c);
7239 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7240 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7241 && TREE_CODE (val) == MEM_REF)
7243 val = TREE_OPERAND (val, 0);
7244 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7245 val = TREE_OPERAND (val, 0);
7246 if (TREE_CODE (val) == INDIRECT_REF
7247 || TREE_CODE (val) == ADDR_EXPR)
7248 val = TREE_OPERAND (val, 0);
7249 if (is_variable_sized (val))
7250 continue;
7253 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7254 outer taskloop region. */
7255 omp_context *ctx_for_o = ctx;
7256 if (is_taskloop
7257 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7258 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7259 ctx_for_o = ctx->outer;
7261 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
7263 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7264 && is_global_var (var)
7265 && (val == OMP_CLAUSE_DECL (c)
7266 || !is_task_ctx (ctx)
7267 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
7268 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
7269 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
7270 != POINTER_TYPE)))))
7271 continue;
7273 t = omp_member_access_dummy_var (var);
7274 if (t)
7276 var = DECL_VALUE_EXPR (var);
7277 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
7278 if (o != t)
7279 var = unshare_and_remap (var, t, o);
7280 else
7281 var = unshare_expr (var);
7284 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
7286 /* Handle taskloop firstprivate/lastprivate, where the
7287 lastprivate on GIMPLE_OMP_TASK is represented as
7288 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7289 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7290 x = omp_build_component_ref (ctx->sender_decl, f);
7291 if (use_pointer_for_field (val, ctx))
7292 var = build_fold_addr_expr (var);
7293 gimplify_assign (x, var, ilist);
7294 DECL_ABSTRACT_ORIGIN (f) = NULL;
7295 continue;
7298 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7299 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
7300 || val == OMP_CLAUSE_DECL (c))
7301 && is_variable_sized (val))
7302 continue;
7303 by_ref = use_pointer_for_field (val, NULL);
7305 switch (OMP_CLAUSE_CODE (c))
7307 case OMP_CLAUSE_FIRSTPRIVATE:
7308 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7309 && !by_ref
7310 && is_task_ctx (ctx))
7311 TREE_NO_WARNING (var) = 1;
7312 do_in = true;
7313 break;
7315 case OMP_CLAUSE_PRIVATE:
7316 case OMP_CLAUSE_COPYIN:
7317 case OMP_CLAUSE__LOOPTEMP_:
7318 case OMP_CLAUSE__REDUCTEMP_:
7319 do_in = true;
7320 break;
7322 case OMP_CLAUSE_LASTPRIVATE:
7323 if (by_ref || omp_is_reference (val))
7325 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7326 continue;
7327 do_in = true;
7329 else
7331 do_out = true;
7332 if (lang_hooks.decls.omp_private_outer_ref (val))
7333 do_in = true;
7335 break;
7337 case OMP_CLAUSE_REDUCTION:
7338 case OMP_CLAUSE_IN_REDUCTION:
7339 do_in = true;
7340 if (val == OMP_CLAUSE_DECL (c))
7342 if (is_task_ctx (ctx))
7343 by_ref = use_pointer_for_field (val, ctx);
7344 else
7345 do_out = !(by_ref || omp_is_reference (val));
7347 else
7348 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
7349 break;
7351 default:
7352 gcc_unreachable ();
7355 if (do_in)
7357 ref = build_sender_ref (val, ctx);
7358 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
7359 gimplify_assign (ref, x, ilist);
7360 if (is_task_ctx (ctx))
7361 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
7364 if (do_out)
7366 ref = build_sender_ref (val, ctx);
7367 gimplify_assign (var, ref, olist);
7372 /* Generate code to implement SHARED from the sender (aka parent)
7373 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7374 list things that got automatically shared. */
7376 static void
7377 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
7379 tree var, ovar, nvar, t, f, x, record_type;
7381 if (ctx->record_type == NULL)
7382 return;
7384 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
7385 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7387 ovar = DECL_ABSTRACT_ORIGIN (f);
7388 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
7389 continue;
7391 nvar = maybe_lookup_decl (ovar, ctx);
7392 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
7393 continue;
7395 /* If CTX is a nested parallel directive. Find the immediately
7396 enclosing parallel or workshare construct that contains a
7397 mapping for OVAR. */
7398 var = lookup_decl_in_outer_ctx (ovar, ctx);
7400 t = omp_member_access_dummy_var (var);
7401 if (t)
7403 var = DECL_VALUE_EXPR (var);
7404 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
7405 if (o != t)
7406 var = unshare_and_remap (var, t, o);
7407 else
7408 var = unshare_expr (var);
7411 if (use_pointer_for_field (ovar, ctx))
7413 x = build_sender_ref (ovar, ctx);
7414 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
7415 && TREE_TYPE (f) == TREE_TYPE (ovar))
7417 gcc_assert (is_parallel_ctx (ctx)
7418 && DECL_ARTIFICIAL (ovar));
7419 /* _condtemp_ clause. */
7420 var = build_constructor (TREE_TYPE (x), NULL);
7422 else
7423 var = build_fold_addr_expr (var);
7424 gimplify_assign (x, var, ilist);
7426 else
7428 x = build_sender_ref (ovar, ctx);
7429 gimplify_assign (x, var, ilist);
7431 if (!TREE_READONLY (var)
7432 /* We don't need to receive a new reference to a result
7433 or parm decl. In fact we may not store to it as we will
7434 invalidate any pending RSO and generate wrong gimple
7435 during inlining. */
7436 && !((TREE_CODE (var) == RESULT_DECL
7437 || TREE_CODE (var) == PARM_DECL)
7438 && DECL_BY_REFERENCE (var)))
7440 x = build_sender_ref (ovar, ctx);
7441 gimplify_assign (var, x, olist);
7447 /* Emit an OpenACC head marker call, encapulating the partitioning and
7448 other information that must be processed by the target compiler.
7449 Return the maximum number of dimensions the associated loop might
7450 be partitioned over. */
7452 static unsigned
7453 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7454 gimple_seq *seq, omp_context *ctx)
7456 unsigned levels = 0;
7457 unsigned tag = 0;
7458 tree gang_static = NULL_TREE;
7459 auto_vec<tree, 5> args;
7461 args.quick_push (build_int_cst
7462 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
7463 args.quick_push (ddvar);
7464 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7466 switch (OMP_CLAUSE_CODE (c))
7468 case OMP_CLAUSE_GANG:
7469 tag |= OLF_DIM_GANG;
7470 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
7471 /* static:* is represented by -1, and we can ignore it, as
7472 scheduling is always static. */
7473 if (gang_static && integer_minus_onep (gang_static))
7474 gang_static = NULL_TREE;
7475 levels++;
7476 break;
7478 case OMP_CLAUSE_WORKER:
7479 tag |= OLF_DIM_WORKER;
7480 levels++;
7481 break;
7483 case OMP_CLAUSE_VECTOR:
7484 tag |= OLF_DIM_VECTOR;
7485 levels++;
7486 break;
7488 case OMP_CLAUSE_SEQ:
7489 tag |= OLF_SEQ;
7490 break;
7492 case OMP_CLAUSE_AUTO:
7493 tag |= OLF_AUTO;
7494 break;
7496 case OMP_CLAUSE_INDEPENDENT:
7497 tag |= OLF_INDEPENDENT;
7498 break;
7500 case OMP_CLAUSE_TILE:
7501 tag |= OLF_TILE;
7502 break;
7504 default:
7505 continue;
7509 if (gang_static)
7511 if (DECL_P (gang_static))
7512 gang_static = build_outer_var_ref (gang_static, ctx);
7513 tag |= OLF_GANG_STATIC;
7516 /* In a parallel region, loops are implicitly INDEPENDENT. */
7517 omp_context *tgt = enclosing_target_ctx (ctx);
7518 if (!tgt || is_oacc_parallel_or_serial (tgt))
7519 tag |= OLF_INDEPENDENT;
7521 if (tag & OLF_TILE)
7522 /* Tiling could use all 3 levels. */
7523 levels = 3;
7524 else
7526 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7527 Ensure at least one level, or 2 for possible auto
7528 partitioning */
7529 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7530 << OLF_DIM_BASE) | OLF_SEQ));
7532 if (levels < 1u + maybe_auto)
7533 levels = 1u + maybe_auto;
7536 args.quick_push (build_int_cst (integer_type_node, levels));
7537 args.quick_push (build_int_cst (integer_type_node, tag));
7538 if (gang_static)
7539 args.quick_push (gang_static);
7541 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7542 gimple_set_location (call, loc);
7543 gimple_set_lhs (call, ddvar);
7544 gimple_seq_add_stmt (seq, call);
7546 return levels;
7549 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
7550 partitioning level of the enclosed region. */
7552 static void
7553 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7554 tree tofollow, gimple_seq *seq)
7556 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7557 : IFN_UNIQUE_OACC_TAIL_MARK);
7558 tree marker = build_int_cst (integer_type_node, marker_kind);
7559 int nargs = 2 + (tofollow != NULL_TREE);
7560 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7561 marker, ddvar, tofollow);
7562 gimple_set_location (call, loc);
7563 gimple_set_lhs (call, ddvar);
7564 gimple_seq_add_stmt (seq, call);
7567 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7568 the loop clauses, from which we extract reductions. Initialize
7569 HEAD and TAIL. */
7571 static void
7572 lower_oacc_head_tail (location_t loc, tree clauses,
7573 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7575 bool inner = false;
7576 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7577 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7579 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
7580 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7581 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7583 gcc_assert (count);
7584 for (unsigned done = 1; count; count--, done++)
7586 gimple_seq fork_seq = NULL;
7587 gimple_seq join_seq = NULL;
7589 tree place = build_int_cst (integer_type_node, -1);
7590 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7591 fork_kind, ddvar, place);
7592 gimple_set_location (fork, loc);
7593 gimple_set_lhs (fork, ddvar);
7595 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7596 join_kind, ddvar, place);
7597 gimple_set_location (join, loc);
7598 gimple_set_lhs (join, ddvar);
7600 /* Mark the beginning of this level sequence. */
7601 if (inner)
7602 lower_oacc_loop_marker (loc, ddvar, true,
7603 build_int_cst (integer_type_node, count),
7604 &fork_seq);
7605 lower_oacc_loop_marker (loc, ddvar, false,
7606 build_int_cst (integer_type_node, done),
7607 &join_seq);
7609 lower_oacc_reductions (loc, clauses, place, inner,
7610 fork, join, &fork_seq, &join_seq, ctx);
7612 /* Append this level to head. */
7613 gimple_seq_add_seq (head, fork_seq);
7614 /* Prepend it to tail. */
7615 gimple_seq_add_seq (&join_seq, *tail);
7616 *tail = join_seq;
7618 inner = true;
7621 /* Mark the end of the sequence. */
7622 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
7623 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
7626 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7627 catch handler and return it. This prevents programs from violating the
7628 structured block semantics with throws. */
7630 static gimple_seq
7631 maybe_catch_exception (gimple_seq body)
7633 gimple *g;
7634 tree decl;
7636 if (!flag_exceptions)
7637 return body;
7639 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7640 decl = lang_hooks.eh_protect_cleanup_actions ();
7641 else
7642 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7644 g = gimple_build_eh_must_not_throw (decl);
7645 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7646 GIMPLE_TRY_CATCH);
7648 return gimple_seq_alloc_with_stmt (g);
7652 /* Routines to lower OMP directives into OMP-GIMPLE. */
7654 /* If ctx is a worksharing context inside of a cancellable parallel
7655 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
7656 and conditional branch to parallel's cancel_label to handle
7657 cancellation in the implicit barrier. */
7659 static void
7660 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7661 gimple_seq *body)
7663 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7664 if (gimple_omp_return_nowait_p (omp_return))
7665 return;
7666 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7667 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7668 && outer->cancellable)
7670 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7671 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7672 tree lhs = create_tmp_var (c_bool_type);
7673 gimple_omp_return_set_lhs (omp_return, lhs);
7674 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
7675 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7676 fold_convert (c_bool_type,
7677 boolean_false_node),
7678 outer->cancel_label, fallthru_label);
7679 gimple_seq_add_stmt (body, g);
7680 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7682 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7683 return;
7686 /* Find the first task_reduction or reduction clause or return NULL
7687 if there are none. */
7689 static inline tree
7690 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7691 enum omp_clause_code ccode)
7693 while (1)
7695 clauses = omp_find_clause (clauses, ccode);
7696 if (clauses == NULL_TREE)
7697 return NULL_TREE;
7698 if (ccode != OMP_CLAUSE_REDUCTION
7699 || code == OMP_TASKLOOP
7700 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7701 return clauses;
7702 clauses = OMP_CLAUSE_CHAIN (clauses);
7706 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7707 gimple_seq *, gimple_seq *);
7709 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7710 CTX is the enclosing OMP context for the current statement. */
7712 static void
7713 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7715 tree block, control;
7716 gimple_stmt_iterator tgsi;
7717 gomp_sections *stmt;
7718 gimple *t;
7719 gbind *new_stmt, *bind;
7720 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
7722 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
7724 push_gimplify_context ();
7726 dlist = NULL;
7727 ilist = NULL;
7729 tree rclauses
7730 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7731 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7732 tree rtmp = NULL_TREE;
7733 if (rclauses)
7735 tree type = build_pointer_type (pointer_sized_int_node);
7736 tree temp = create_tmp_var (type);
7737 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7738 OMP_CLAUSE_DECL (c) = temp;
7739 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7740 gimple_omp_sections_set_clauses (stmt, c);
7741 lower_omp_task_reductions (ctx, OMP_SECTIONS,
7742 gimple_omp_sections_clauses (stmt),
7743 &ilist, &tred_dlist);
7744 rclauses = c;
7745 rtmp = make_ssa_name (type);
7746 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7749 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7750 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7752 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7753 &ilist, &dlist, ctx, NULL);
7755 control = create_tmp_var (unsigned_type_node, ".section");
7756 gimple_omp_sections_set_control (stmt, control);
7758 new_body = gimple_omp_body (stmt);
7759 gimple_omp_set_body (stmt, NULL);
7760 tgsi = gsi_start (new_body);
7761 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
7763 omp_context *sctx;
7764 gimple *sec_start;
7766 sec_start = gsi_stmt (tgsi);
7767 sctx = maybe_lookup_ctx (sec_start);
7768 gcc_assert (sctx);
7770 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7771 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7772 GSI_CONTINUE_LINKING);
7773 gimple_omp_set_body (sec_start, NULL);
7775 if (gsi_one_before_end_p (tgsi))
7777 gimple_seq l = NULL;
7778 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
7779 &ilist, &l, &clist, ctx);
7780 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
7781 gimple_omp_section_set_last (sec_start);
7784 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
7785 GSI_CONTINUE_LINKING);
7788 block = make_node (BLOCK);
7789 bind = gimple_build_bind (NULL, new_body, block);
7791 olist = NULL;
7792 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
7793 &clist, ctx);
7794 if (clist)
7796 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7797 gcall *g = gimple_build_call (fndecl, 0);
7798 gimple_seq_add_stmt (&olist, g);
7799 gimple_seq_add_seq (&olist, clist);
7800 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7801 g = gimple_build_call (fndecl, 0);
7802 gimple_seq_add_stmt (&olist, g);
7805 block = make_node (BLOCK);
7806 new_stmt = gimple_build_bind (NULL, NULL, block);
7807 gsi_replace (gsi_p, new_stmt, true);
7809 pop_gimplify_context (new_stmt);
7810 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7811 BLOCK_VARS (block) = gimple_bind_vars (bind);
7812 if (BLOCK_VARS (block))
7813 TREE_USED (block) = 1;
7815 new_body = NULL;
7816 gimple_seq_add_seq (&new_body, ilist);
7817 gimple_seq_add_stmt (&new_body, stmt);
7818 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7819 gimple_seq_add_stmt (&new_body, bind);
7821 t = gimple_build_omp_continue (control, control);
7822 gimple_seq_add_stmt (&new_body, t);
7824 gimple_seq_add_seq (&new_body, olist);
7825 if (ctx->cancellable)
7826 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7827 gimple_seq_add_seq (&new_body, dlist);
7829 new_body = maybe_catch_exception (new_body);
7831 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7832 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7833 t = gimple_build_omp_return (nowait);
7834 gimple_seq_add_stmt (&new_body, t);
7835 gimple_seq_add_seq (&new_body, tred_dlist);
7836 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7838 if (rclauses)
7839 OMP_CLAUSE_DECL (rclauses) = rtmp;
7841 gimple_bind_set_body (new_stmt, new_body);
7845 /* A subroutine of lower_omp_single. Expand the simple form of
7846 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7848 if (GOMP_single_start ())
7849 BODY;
7850 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7852 FIXME. It may be better to delay expanding the logic of this until
7853 pass_expand_omp. The expanded logic may make the job more difficult
7854 to a synchronization analysis pass. */
7856 static void
7857 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7859 location_t loc = gimple_location (single_stmt);
7860 tree tlabel = create_artificial_label (loc);
7861 tree flabel = create_artificial_label (loc);
7862 gimple *call, *cond;
7863 tree lhs, decl;
7865 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7866 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7867 call = gimple_build_call (decl, 0);
7868 gimple_call_set_lhs (call, lhs);
7869 gimple_seq_add_stmt (pre_p, call);
7871 cond = gimple_build_cond (EQ_EXPR, lhs,
7872 fold_convert_loc (loc, TREE_TYPE (lhs),
7873 boolean_true_node),
7874 tlabel, flabel);
7875 gimple_seq_add_stmt (pre_p, cond);
7876 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7877 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7878 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
7882 /* A subroutine of lower_omp_single. Expand the simple form of
7883 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7885 #pragma omp single copyprivate (a, b, c)
7887 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7890 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7892 BODY;
7893 copyout.a = a;
7894 copyout.b = b;
7895 copyout.c = c;
7896 GOMP_single_copy_end (&copyout);
7898 else
7900 a = copyout_p->a;
7901 b = copyout_p->b;
7902 c = copyout_p->c;
7904 GOMP_barrier ();
7907 FIXME. It may be better to delay expanding the logic of this until
7908 pass_expand_omp. The expanded logic may make the job more difficult
7909 to a synchronization analysis pass. */
7911 static void
7912 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7913 omp_context *ctx)
7915 tree ptr_type, t, l0, l1, l2, bfn_decl;
7916 gimple_seq copyin_seq;
7917 location_t loc = gimple_location (single_stmt);
7919 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
7921 ptr_type = build_pointer_type (ctx->record_type);
7922 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
7924 l0 = create_artificial_label (loc);
7925 l1 = create_artificial_label (loc);
7926 l2 = create_artificial_label (loc);
7928 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7929 t = build_call_expr_loc (loc, bfn_decl, 0);
7930 t = fold_convert_loc (loc, ptr_type, t);
7931 gimplify_assign (ctx->receiver_decl, t, pre_p);
7933 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7934 build_int_cst (ptr_type, 0));
7935 t = build3 (COND_EXPR, void_type_node, t,
7936 build_and_jump (&l0), build_and_jump (&l1));
7937 gimplify_and_add (t, pre_p);
7939 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
7941 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7943 copyin_seq = NULL;
7944 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
7945 &copyin_seq, ctx);
7947 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7948 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
7949 t = build_call_expr_loc (loc, bfn_decl, 1, t);
7950 gimplify_and_add (t, pre_p);
7952 t = build_and_jump (&l2);
7953 gimplify_and_add (t, pre_p);
7955 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
7957 gimple_seq_add_seq (pre_p, copyin_seq);
7959 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
7963 /* Expand code for an OpenMP single directive. */
7965 static void
7966 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7968 tree block;
7969 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
7970 gbind *bind;
7971 gimple_seq bind_body, bind_body_tail = NULL, dlist;
7973 push_gimplify_context ();
7975 block = make_node (BLOCK);
7976 bind = gimple_build_bind (NULL, NULL, block);
7977 gsi_replace (gsi_p, bind, true);
7978 bind_body = NULL;
7979 dlist = NULL;
7980 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
7981 &bind_body, &dlist, ctx, NULL);
7982 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
7984 gimple_seq_add_stmt (&bind_body, single_stmt);
7986 if (ctx->record_type)
7987 lower_omp_single_copy (single_stmt, &bind_body, ctx);
7988 else
7989 lower_omp_single_simple (single_stmt, &bind_body);
7991 gimple_omp_set_body (single_stmt, NULL);
7993 gimple_seq_add_seq (&bind_body, dlist);
7995 bind_body = maybe_catch_exception (bind_body);
7997 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
7998 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7999 gimple *g = gimple_build_omp_return (nowait);
8000 gimple_seq_add_stmt (&bind_body_tail, g);
8001 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8002 if (ctx->record_type)
8004 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8005 tree clobber = build_clobber (ctx->record_type);
8006 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8007 clobber), GSI_SAME_STMT);
8009 gimple_seq_add_seq (&bind_body, bind_body_tail);
8010 gimple_bind_set_body (bind, bind_body);
8012 pop_gimplify_context (bind);
8014 gimple_bind_append_vars (bind, ctx->block_vars);
8015 BLOCK_VARS (block) = ctx->block_vars;
8016 if (BLOCK_VARS (block))
8017 TREE_USED (block) = 1;
8021 /* Expand code for an OpenMP master directive. */
8023 static void
8024 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8026 tree block, lab = NULL, x, bfn_decl;
8027 gimple *stmt = gsi_stmt (*gsi_p);
8028 gbind *bind;
8029 location_t loc = gimple_location (stmt);
8030 gimple_seq tseq;
8032 push_gimplify_context ();
8034 block = make_node (BLOCK);
8035 bind = gimple_build_bind (NULL, NULL, block);
8036 gsi_replace (gsi_p, bind, true);
8037 gimple_bind_add_stmt (bind, stmt);
8039 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8040 x = build_call_expr_loc (loc, bfn_decl, 0);
8041 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8042 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8043 tseq = NULL;
8044 gimplify_and_add (x, &tseq);
8045 gimple_bind_add_seq (bind, tseq);
8047 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8048 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8049 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8050 gimple_omp_set_body (stmt, NULL);
8052 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8054 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8056 pop_gimplify_context (bind);
8058 gimple_bind_append_vars (bind, ctx->block_vars);
8059 BLOCK_VARS (block) = ctx->block_vars;
8062 /* Helper function for lower_omp_task_reductions. For a specific PASS
8063 find out the current clause it should be processed, or return false
8064 if all have been processed already. */
8066 static inline bool
8067 omp_task_reduction_iterate (int pass, enum tree_code code,
8068 enum omp_clause_code ccode, tree *c, tree *decl,
8069 tree *type, tree *next)
8071 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8073 if (ccode == OMP_CLAUSE_REDUCTION
8074 && code != OMP_TASKLOOP
8075 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8076 continue;
8077 *decl = OMP_CLAUSE_DECL (*c);
8078 *type = TREE_TYPE (*decl);
8079 if (TREE_CODE (*decl) == MEM_REF)
8081 if (pass != 1)
8082 continue;
8084 else
8086 if (omp_is_reference (*decl))
8087 *type = TREE_TYPE (*type);
8088 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8089 continue;
8091 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8092 return true;
8094 *decl = NULL_TREE;
8095 *type = NULL_TREE;
8096 *next = NULL_TREE;
8097 return false;
8100 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8101 OMP_TASKGROUP only with task modifier). Register mapping of those in
8102 START sequence and reducing them and unregister them in the END sequence. */
8104 static void
8105 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8106 gimple_seq *start, gimple_seq *end)
8108 enum omp_clause_code ccode
8109 = (code == OMP_TASKGROUP
8110 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8111 tree cancellable = NULL_TREE;
8112 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8113 if (clauses == NULL_TREE)
8114 return;
8115 if (code == OMP_FOR || code == OMP_SECTIONS)
8117 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8118 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8119 && outer->cancellable)
8121 cancellable = error_mark_node;
8122 break;
8124 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8125 break;
8127 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8128 tree *last = &TYPE_FIELDS (record_type);
8129 unsigned cnt = 0;
8130 if (cancellable)
8132 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8133 ptr_type_node);
8134 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8135 integer_type_node);
8136 *last = field;
8137 DECL_CHAIN (field) = ifield;
8138 last = &DECL_CHAIN (ifield);
8139 DECL_CONTEXT (field) = record_type;
8140 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8141 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8142 DECL_CONTEXT (ifield) = record_type;
8143 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8144 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8146 for (int pass = 0; pass < 2; pass++)
8148 tree decl, type, next;
8149 for (tree c = clauses;
8150 omp_task_reduction_iterate (pass, code, ccode,
8151 &c, &decl, &type, &next); c = next)
8153 ++cnt;
8154 tree new_type = type;
8155 if (ctx->outer)
8156 new_type = remap_type (type, &ctx->outer->cb);
8157 tree field
8158 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8159 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8160 new_type);
8161 if (DECL_P (decl) && type == TREE_TYPE (decl))
8163 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8164 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8165 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8167 else
8168 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8169 DECL_CONTEXT (field) = record_type;
8170 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8171 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8172 *last = field;
8173 last = &DECL_CHAIN (field);
8174 tree bfield
8175 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8176 boolean_type_node);
8177 DECL_CONTEXT (bfield) = record_type;
8178 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8179 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
8180 *last = bfield;
8181 last = &DECL_CHAIN (bfield);
8184 *last = NULL_TREE;
8185 layout_type (record_type);
8187 /* Build up an array which registers with the runtime all the reductions
8188 and deregisters them at the end. Format documented in libgomp/task.c. */
8189 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8190 tree avar = create_tmp_var_raw (atype);
8191 gimple_add_tmp_var (avar);
8192 TREE_ADDRESSABLE (avar) = 1;
8193 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8194 NULL_TREE, NULL_TREE);
8195 tree t = build_int_cst (pointer_sized_int_node, cnt);
8196 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8197 gimple_seq seq = NULL;
8198 tree sz = fold_convert (pointer_sized_int_node,
8199 TYPE_SIZE_UNIT (record_type));
8200 int cachesz = 64;
8201 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8202 build_int_cst (pointer_sized_int_node, cachesz - 1));
8203 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8204 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8205 ctx->task_reductions.create (1 + cnt);
8206 ctx->task_reduction_map = new hash_map<tree, unsigned>;
8207 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8208 ? sz : NULL_TREE);
8209 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8210 gimple_seq_add_seq (start, seq);
8211 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8212 NULL_TREE, NULL_TREE);
8213 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8214 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8215 NULL_TREE, NULL_TREE);
8216 t = build_int_cst (pointer_sized_int_node,
8217 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8218 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8219 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8220 NULL_TREE, NULL_TREE);
8221 t = build_int_cst (pointer_sized_int_node, -1);
8222 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8223 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8224 NULL_TREE, NULL_TREE);
8225 t = build_int_cst (pointer_sized_int_node, 0);
8226 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8228 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
8229 and for each task reduction checks a bool right after the private variable
8230 within that thread's chunk; if the bool is clear, it hasn't been
8231 initialized and thus isn't going to be reduced nor destructed, otherwise
8232 reduce and destruct it. */
8233 tree idx = create_tmp_var (size_type_node);
8234 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8235 tree num_thr_sz = create_tmp_var (size_type_node);
8236 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8237 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8238 tree lab3 = NULL_TREE;
8239 gimple *g;
8240 if (code == OMP_FOR || code == OMP_SECTIONS)
8242 /* For worksharing constructs, only perform it in the master thread,
8243 with the exception of cancelled implicit barriers - then only handle
8244 the current thread. */
8245 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8246 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8247 tree thr_num = create_tmp_var (integer_type_node);
8248 g = gimple_build_call (t, 0);
8249 gimple_call_set_lhs (g, thr_num);
8250 gimple_seq_add_stmt (end, g);
8251 if (cancellable)
8253 tree c;
8254 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8255 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8256 lab3 = create_artificial_label (UNKNOWN_LOCATION);
8257 if (code == OMP_FOR)
8258 c = gimple_omp_for_clauses (ctx->stmt);
8259 else /* if (code == OMP_SECTIONS) */
8260 c = gimple_omp_sections_clauses (ctx->stmt);
8261 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8262 cancellable = c;
8263 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8264 lab5, lab6);
8265 gimple_seq_add_stmt (end, g);
8266 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8267 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8268 gimple_seq_add_stmt (end, g);
8269 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8270 build_one_cst (TREE_TYPE (idx)));
8271 gimple_seq_add_stmt (end, g);
8272 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8273 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8275 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8276 gimple_seq_add_stmt (end, g);
8277 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8279 if (code != OMP_PARALLEL)
8281 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8282 tree num_thr = create_tmp_var (integer_type_node);
8283 g = gimple_build_call (t, 0);
8284 gimple_call_set_lhs (g, num_thr);
8285 gimple_seq_add_stmt (end, g);
8286 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8287 gimple_seq_add_stmt (end, g);
8288 if (cancellable)
8289 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8291 else
8293 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8294 OMP_CLAUSE__REDUCTEMP_);
8295 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8296 t = fold_convert (size_type_node, t);
8297 gimplify_assign (num_thr_sz, t, end);
8299 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8300 NULL_TREE, NULL_TREE);
8301 tree data = create_tmp_var (pointer_sized_int_node);
8302 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8303 gimple_seq_add_stmt (end, gimple_build_label (lab1));
8304 tree ptr;
8305 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8306 ptr = create_tmp_var (build_pointer_type (record_type));
8307 else
8308 ptr = create_tmp_var (ptr_type_node);
8309 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8311 tree field = TYPE_FIELDS (record_type);
8312 cnt = 0;
8313 if (cancellable)
8314 field = DECL_CHAIN (DECL_CHAIN (field));
8315 for (int pass = 0; pass < 2; pass++)
8317 tree decl, type, next;
8318 for (tree c = clauses;
8319 omp_task_reduction_iterate (pass, code, ccode,
8320 &c, &decl, &type, &next); c = next)
8322 tree var = decl, ref;
8323 if (TREE_CODE (decl) == MEM_REF)
8325 var = TREE_OPERAND (var, 0);
8326 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8327 var = TREE_OPERAND (var, 0);
8328 tree v = var;
8329 if (TREE_CODE (var) == ADDR_EXPR)
8330 var = TREE_OPERAND (var, 0);
8331 else if (TREE_CODE (var) == INDIRECT_REF)
8332 var = TREE_OPERAND (var, 0);
8333 tree orig_var = var;
8334 if (is_variable_sized (var))
8336 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8337 var = DECL_VALUE_EXPR (var);
8338 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8339 var = TREE_OPERAND (var, 0);
8340 gcc_assert (DECL_P (var));
8342 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8343 if (orig_var != var)
8344 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8345 else if (TREE_CODE (v) == ADDR_EXPR)
8346 t = build_fold_addr_expr (t);
8347 else if (TREE_CODE (v) == INDIRECT_REF)
8348 t = build_fold_indirect_ref (t);
8349 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8351 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8352 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8353 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8355 if (!integer_zerop (TREE_OPERAND (decl, 1)))
8356 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8357 fold_convert (size_type_node,
8358 TREE_OPERAND (decl, 1)));
8360 else
8362 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8363 if (!omp_is_reference (decl))
8364 t = build_fold_addr_expr (t);
8366 t = fold_convert (pointer_sized_int_node, t);
8367 seq = NULL;
8368 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8369 gimple_seq_add_seq (start, seq);
8370 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8371 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8372 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8373 t = unshare_expr (byte_position (field));
8374 t = fold_convert (pointer_sized_int_node, t);
8375 ctx->task_reduction_map->put (c, cnt);
8376 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8377 ? t : NULL_TREE);
8378 seq = NULL;
8379 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8380 gimple_seq_add_seq (start, seq);
8381 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8382 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8383 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8385 tree bfield = DECL_CHAIN (field);
8386 tree cond;
8387 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8388 /* In parallel or worksharing all threads unconditionally
8389 initialize all their task reduction private variables. */
8390 cond = boolean_true_node;
8391 else if (TREE_TYPE (ptr) == ptr_type_node)
8393 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8394 unshare_expr (byte_position (bfield)));
8395 seq = NULL;
8396 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8397 gimple_seq_add_seq (end, seq);
8398 tree pbool = build_pointer_type (TREE_TYPE (bfield));
8399 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8400 build_int_cst (pbool, 0));
8402 else
8403 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8404 build_simple_mem_ref (ptr), bfield, NULL_TREE);
8405 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8406 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8407 tree condv = create_tmp_var (boolean_type_node);
8408 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8409 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8410 lab3, lab4);
8411 gimple_seq_add_stmt (end, g);
8412 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8413 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8415 /* If this reduction doesn't need destruction and parallel
8416 has been cancelled, there is nothing to do for this
8417 reduction, so jump around the merge operation. */
8418 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8419 g = gimple_build_cond (NE_EXPR, cancellable,
8420 build_zero_cst (TREE_TYPE (cancellable)),
8421 lab4, lab5);
8422 gimple_seq_add_stmt (end, g);
8423 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8426 tree new_var;
8427 if (TREE_TYPE (ptr) == ptr_type_node)
8429 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8430 unshare_expr (byte_position (field)));
8431 seq = NULL;
8432 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8433 gimple_seq_add_seq (end, seq);
8434 tree pbool = build_pointer_type (TREE_TYPE (field));
8435 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8436 build_int_cst (pbool, 0));
8438 else
8439 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8440 build_simple_mem_ref (ptr), field, NULL_TREE);
8442 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8443 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8444 ref = build_simple_mem_ref (ref);
8445 /* reduction(-:var) sums up the partial results, so it acts
8446 identically to reduction(+:var). */
8447 if (rcode == MINUS_EXPR)
8448 rcode = PLUS_EXPR;
8449 if (TREE_CODE (decl) == MEM_REF)
8451 tree type = TREE_TYPE (new_var);
8452 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8453 tree i = create_tmp_var (TREE_TYPE (v));
8454 tree ptype = build_pointer_type (TREE_TYPE (type));
8455 if (DECL_P (v))
8457 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8458 tree vv = create_tmp_var (TREE_TYPE (v));
8459 gimplify_assign (vv, v, start);
8460 v = vv;
8462 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8463 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8464 new_var = build_fold_addr_expr (new_var);
8465 new_var = fold_convert (ptype, new_var);
8466 ref = fold_convert (ptype, ref);
8467 tree m = create_tmp_var (ptype);
8468 gimplify_assign (m, new_var, end);
8469 new_var = m;
8470 m = create_tmp_var (ptype);
8471 gimplify_assign (m, ref, end);
8472 ref = m;
8473 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8474 tree body = create_artificial_label (UNKNOWN_LOCATION);
8475 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8476 gimple_seq_add_stmt (end, gimple_build_label (body));
8477 tree priv = build_simple_mem_ref (new_var);
8478 tree out = build_simple_mem_ref (ref);
8479 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8481 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8482 tree decl_placeholder
8483 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8484 tree lab6 = NULL_TREE;
8485 if (cancellable)
8487 /* If this reduction needs destruction and parallel
8488 has been cancelled, jump around the merge operation
8489 to the destruction. */
8490 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8491 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8492 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8493 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8494 lab6, lab5);
8495 gimple_seq_add_stmt (end, g);
8496 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8498 SET_DECL_VALUE_EXPR (placeholder, out);
8499 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8500 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8501 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8502 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8503 gimple_seq_add_seq (end,
8504 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8505 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8506 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8508 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8509 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8511 if (cancellable)
8512 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8513 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8514 if (x)
8516 gimple_seq tseq = NULL;
8517 gimplify_stmt (&x, &tseq);
8518 gimple_seq_add_seq (end, tseq);
8521 else
8523 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8524 out = unshare_expr (out);
8525 gimplify_assign (out, x, end);
8527 gimple *g
8528 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8529 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8530 gimple_seq_add_stmt (end, g);
8531 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8532 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8533 gimple_seq_add_stmt (end, g);
8534 g = gimple_build_assign (i, PLUS_EXPR, i,
8535 build_int_cst (TREE_TYPE (i), 1));
8536 gimple_seq_add_stmt (end, g);
8537 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8538 gimple_seq_add_stmt (end, g);
8539 gimple_seq_add_stmt (end, gimple_build_label (endl));
8541 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8543 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8544 tree oldv = NULL_TREE;
8545 tree lab6 = NULL_TREE;
8546 if (cancellable)
8548 /* If this reduction needs destruction and parallel
8549 has been cancelled, jump around the merge operation
8550 to the destruction. */
8551 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8552 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8553 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8554 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8555 lab6, lab5);
8556 gimple_seq_add_stmt (end, g);
8557 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8559 if (omp_is_reference (decl)
8560 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8561 TREE_TYPE (ref)))
8562 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8563 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8564 tree refv = create_tmp_var (TREE_TYPE (ref));
8565 gimplify_assign (refv, ref, end);
8566 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8567 SET_DECL_VALUE_EXPR (placeholder, ref);
8568 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8569 tree d = maybe_lookup_decl (decl, ctx);
8570 gcc_assert (d);
8571 if (DECL_HAS_VALUE_EXPR_P (d))
8572 oldv = DECL_VALUE_EXPR (d);
8573 if (omp_is_reference (var))
8575 tree v = fold_convert (TREE_TYPE (d),
8576 build_fold_addr_expr (new_var));
8577 SET_DECL_VALUE_EXPR (d, v);
8579 else
8580 SET_DECL_VALUE_EXPR (d, new_var);
8581 DECL_HAS_VALUE_EXPR_P (d) = 1;
8582 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8583 if (oldv)
8584 SET_DECL_VALUE_EXPR (d, oldv);
8585 else
8587 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8588 DECL_HAS_VALUE_EXPR_P (d) = 0;
8590 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8591 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8592 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8593 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8594 if (cancellable)
8595 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8596 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8597 if (x)
8599 gimple_seq tseq = NULL;
8600 gimplify_stmt (&x, &tseq);
8601 gimple_seq_add_seq (end, tseq);
8604 else
8606 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8607 ref = unshare_expr (ref);
8608 gimplify_assign (ref, x, end);
8610 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8611 ++cnt;
8612 field = DECL_CHAIN (bfield);
8616 if (code == OMP_TASKGROUP)
8618 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8619 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8620 gimple_seq_add_stmt (start, g);
8622 else
8624 tree c;
8625 if (code == OMP_FOR)
8626 c = gimple_omp_for_clauses (ctx->stmt);
8627 else if (code == OMP_SECTIONS)
8628 c = gimple_omp_sections_clauses (ctx->stmt);
8629 else
8630 c = gimple_omp_taskreg_clauses (ctx->stmt);
8631 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8632 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8633 build_fold_addr_expr (avar));
8634 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8637 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8638 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8639 size_one_node));
8640 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8641 gimple_seq_add_stmt (end, g);
8642 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8643 if (code == OMP_FOR || code == OMP_SECTIONS)
8645 enum built_in_function bfn
8646 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8647 t = builtin_decl_explicit (bfn);
8648 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8649 tree arg;
8650 if (cancellable)
8652 arg = create_tmp_var (c_bool_type);
8653 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8654 cancellable));
8656 else
8657 arg = build_int_cst (c_bool_type, 0);
8658 g = gimple_build_call (t, 1, arg);
8660 else
8662 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8663 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8665 gimple_seq_add_stmt (end, g);
8666 t = build_constructor (atype, NULL);
8667 TREE_THIS_VOLATILE (t) = 1;
8668 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
8671 /* Expand code for an OpenMP taskgroup directive. */
8673 static void
8674 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8676 gimple *stmt = gsi_stmt (*gsi_p);
8677 gcall *x;
8678 gbind *bind;
8679 gimple_seq dseq = NULL;
8680 tree block = make_node (BLOCK);
8682 bind = gimple_build_bind (NULL, NULL, block);
8683 gsi_replace (gsi_p, bind, true);
8684 gimple_bind_add_stmt (bind, stmt);
8686 push_gimplify_context ();
8688 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8690 gimple_bind_add_stmt (bind, x);
8692 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8693 gimple_omp_taskgroup_clauses (stmt),
8694 gimple_bind_body_ptr (bind), &dseq);
8696 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8697 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8698 gimple_omp_set_body (stmt, NULL);
8700 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8701 gimple_bind_add_seq (bind, dseq);
8703 pop_gimplify_context (bind);
8705 gimple_bind_append_vars (bind, ctx->block_vars);
8706 BLOCK_VARS (block) = ctx->block_vars;
8710 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8712 static void
8713 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8714 omp_context *ctx)
8716 struct omp_for_data fd;
8717 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8718 return;
8720 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8721 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8722 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8723 if (!fd.ordered)
8724 return;
8726 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8727 tree c = gimple_omp_ordered_clauses (ord_stmt);
8728 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8729 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8731 /* Merge depend clauses from multiple adjacent
8732 #pragma omp ordered depend(sink:...) constructs
8733 into one #pragma omp ordered depend(sink:...), so that
8734 we can optimize them together. */
8735 gimple_stmt_iterator gsi = *gsi_p;
8736 gsi_next (&gsi);
8737 while (!gsi_end_p (gsi))
8739 gimple *stmt = gsi_stmt (gsi);
8740 if (is_gimple_debug (stmt)
8741 || gimple_code (stmt) == GIMPLE_NOP)
8743 gsi_next (&gsi);
8744 continue;
8746 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8747 break;
8748 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8749 c = gimple_omp_ordered_clauses (ord_stmt2);
8750 if (c == NULL_TREE
8751 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8752 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8753 break;
8754 while (*list_p)
8755 list_p = &OMP_CLAUSE_CHAIN (*list_p);
8756 *list_p = c;
8757 gsi_remove (&gsi, true);
8761 /* Canonicalize sink dependence clauses into one folded clause if
8762 possible.
8764 The basic algorithm is to create a sink vector whose first
8765 element is the GCD of all the first elements, and whose remaining
8766 elements are the minimum of the subsequent columns.
8768 We ignore dependence vectors whose first element is zero because
8769 such dependencies are known to be executed by the same thread.
8771 We take into account the direction of the loop, so a minimum
8772 becomes a maximum if the loop is iterating forwards. We also
8773 ignore sink clauses where the loop direction is unknown, or where
8774 the offsets are clearly invalid because they are not a multiple
8775 of the loop increment.
8777 For example:
8779 #pragma omp for ordered(2)
8780 for (i=0; i < N; ++i)
8781 for (j=0; j < M; ++j)
8783 #pragma omp ordered \
8784 depend(sink:i-8,j-2) \
8785 depend(sink:i,j-1) \ // Completely ignored because i+0.
8786 depend(sink:i-4,j-3) \
8787 depend(sink:i-6,j-4)
8788 #pragma omp ordered depend(source)
8791 Folded clause is:
8793 depend(sink:-gcd(8,4,6),-min(2,3,4))
8794 -or-
8795 depend(sink:-2,-2)
8798 /* FIXME: Computing GCD's where the first element is zero is
8799 non-trivial in the presence of collapsed loops. Do this later. */
8800 if (fd.collapse > 1)
8801 return;
8803 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
8805 /* wide_int is not a POD so it must be default-constructed. */
8806 for (unsigned i = 0; i != 2 * len - 1; ++i)
8807 new (static_cast<void*>(folded_deps + i)) wide_int ();
8809 tree folded_dep = NULL_TREE;
8810 /* TRUE if the first dimension's offset is negative. */
8811 bool neg_offset_p = false;
8813 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8814 unsigned int i;
8815 while ((c = *list_p) != NULL)
8817 bool remove = false;
8819 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
8820 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8821 goto next_ordered_clause;
8823 tree vec;
8824 for (vec = OMP_CLAUSE_DECL (c), i = 0;
8825 vec && TREE_CODE (vec) == TREE_LIST;
8826 vec = TREE_CHAIN (vec), ++i)
8828 gcc_assert (i < len);
8830 /* omp_extract_for_data has canonicalized the condition. */
8831 gcc_assert (fd.loops[i].cond_code == LT_EXPR
8832 || fd.loops[i].cond_code == GT_EXPR);
8833 bool forward = fd.loops[i].cond_code == LT_EXPR;
8834 bool maybe_lexically_later = true;
8836 /* While the committee makes up its mind, bail if we have any
8837 non-constant steps. */
8838 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8839 goto lower_omp_ordered_ret;
8841 tree itype = TREE_TYPE (TREE_VALUE (vec));
8842 if (POINTER_TYPE_P (itype))
8843 itype = sizetype;
8844 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
8845 TYPE_PRECISION (itype),
8846 TYPE_SIGN (itype));
8848 /* Ignore invalid offsets that are not multiples of the step. */
8849 if (!wi::multiple_of_p (wi::abs (offset),
8850 wi::abs (wi::to_wide (fd.loops[i].step)),
8851 UNSIGNED))
8853 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8854 "ignoring sink clause with offset that is not "
8855 "a multiple of the loop step");
8856 remove = true;
8857 goto next_ordered_clause;
8860 /* Calculate the first dimension. The first dimension of
8861 the folded dependency vector is the GCD of the first
8862 elements, while ignoring any first elements whose offset
8863 is 0. */
8864 if (i == 0)
8866 /* Ignore dependence vectors whose first dimension is 0. */
8867 if (offset == 0)
8869 remove = true;
8870 goto next_ordered_clause;
8872 else
8874 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
8876 error_at (OMP_CLAUSE_LOCATION (c),
8877 "first offset must be in opposite direction "
8878 "of loop iterations");
8879 goto lower_omp_ordered_ret;
8881 if (forward)
8882 offset = -offset;
8883 neg_offset_p = forward;
8884 /* Initialize the first time around. */
8885 if (folded_dep == NULL_TREE)
8887 folded_dep = c;
8888 folded_deps[0] = offset;
8890 else
8891 folded_deps[0] = wi::gcd (folded_deps[0],
8892 offset, UNSIGNED);
8895 /* Calculate minimum for the remaining dimensions. */
8896 else
8898 folded_deps[len + i - 1] = offset;
8899 if (folded_dep == c)
8900 folded_deps[i] = offset;
8901 else if (maybe_lexically_later
8902 && !wi::eq_p (folded_deps[i], offset))
8904 if (forward ^ wi::gts_p (folded_deps[i], offset))
8906 unsigned int j;
8907 folded_dep = c;
8908 for (j = 1; j <= i; j++)
8909 folded_deps[j] = folded_deps[len + j - 1];
8911 else
8912 maybe_lexically_later = false;
8916 gcc_assert (i == len);
8918 remove = true;
8920 next_ordered_clause:
8921 if (remove)
8922 *list_p = OMP_CLAUSE_CHAIN (c);
8923 else
8924 list_p = &OMP_CLAUSE_CHAIN (c);
8927 if (folded_dep)
8929 if (neg_offset_p)
8930 folded_deps[0] = -folded_deps[0];
8932 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
8933 if (POINTER_TYPE_P (itype))
8934 itype = sizetype;
8936 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
8937 = wide_int_to_tree (itype, folded_deps[0]);
8938 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
8939 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
8942 lower_omp_ordered_ret:
8944 /* Ordered without clauses is #pragma omp threads, while we want
8945 a nop instead if we remove all clauses. */
8946 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
8947 gsi_replace (gsi_p, gimple_build_nop (), true);
8951 /* Expand code for an OpenMP ordered directive. */
8953 static void
8954 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8956 tree block;
8957 gimple *stmt = gsi_stmt (*gsi_p), *g;
8958 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
8959 gcall *x;
8960 gbind *bind;
8961 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8962 OMP_CLAUSE_SIMD);
8963 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8964 loop. */
8965 bool maybe_simt
8966 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
8967 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8968 OMP_CLAUSE_THREADS);
8970 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8971 OMP_CLAUSE_DEPEND))
8973 /* FIXME: This is needs to be moved to the expansion to verify various
8974 conditions only testable on cfg with dominators computed, and also
8975 all the depend clauses to be merged still might need to be available
8976 for the runtime checks. */
8977 if (0)
8978 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
8979 return;
8982 push_gimplify_context ();
8984 block = make_node (BLOCK);
8985 bind = gimple_build_bind (NULL, NULL, block);
8986 gsi_replace (gsi_p, bind, true);
8987 gimple_bind_add_stmt (bind, stmt);
8989 if (simd)
8991 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
8992 build_int_cst (NULL_TREE, threads));
8993 cfun->has_simduid_loops = true;
8995 else
8996 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
8998 gimple_bind_add_stmt (bind, x);
9000 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9001 if (maybe_simt)
9003 counter = create_tmp_var (integer_type_node);
9004 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9005 gimple_call_set_lhs (g, counter);
9006 gimple_bind_add_stmt (bind, g);
9008 body = create_artificial_label (UNKNOWN_LOCATION);
9009 test = create_artificial_label (UNKNOWN_LOCATION);
9010 gimple_bind_add_stmt (bind, gimple_build_label (body));
9012 tree simt_pred = create_tmp_var (integer_type_node);
9013 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9014 gimple_call_set_lhs (g, simt_pred);
9015 gimple_bind_add_stmt (bind, g);
9017 tree t = create_artificial_label (UNKNOWN_LOCATION);
9018 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9019 gimple_bind_add_stmt (bind, g);
9021 gimple_bind_add_stmt (bind, gimple_build_label (t));
9023 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9024 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9025 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9026 gimple_omp_set_body (stmt, NULL);
9028 if (maybe_simt)
9030 gimple_bind_add_stmt (bind, gimple_build_label (test));
9031 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9032 gimple_bind_add_stmt (bind, g);
9034 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9035 tree nonneg = create_tmp_var (integer_type_node);
9036 gimple_seq tseq = NULL;
9037 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9038 gimple_bind_add_seq (bind, tseq);
9040 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9041 gimple_call_set_lhs (g, nonneg);
9042 gimple_bind_add_stmt (bind, g);
9044 tree end = create_artificial_label (UNKNOWN_LOCATION);
9045 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9046 gimple_bind_add_stmt (bind, g);
9048 gimple_bind_add_stmt (bind, gimple_build_label (end));
9050 if (simd)
9051 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9052 build_int_cst (NULL_TREE, threads));
9053 else
9054 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9056 gimple_bind_add_stmt (bind, x);
9058 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9060 pop_gimplify_context (bind);
9062 gimple_bind_append_vars (bind, ctx->block_vars);
9063 BLOCK_VARS (block) = gimple_bind_vars (bind);
9067 /* Expand code for an OpenMP scan directive and the structured block
9068 before the scan directive. */
9070 static void
9071 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9073 gimple *stmt = gsi_stmt (*gsi_p);
9074 bool has_clauses
9075 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9076 tree lane = NULL_TREE;
9077 gimple_seq before = NULL;
9078 omp_context *octx = ctx->outer;
9079 gcc_assert (octx);
9080 if (octx->scan_exclusive && !has_clauses)
9082 gimple_stmt_iterator gsi2 = *gsi_p;
9083 gsi_next (&gsi2);
9084 gimple *stmt2 = gsi_stmt (gsi2);
9085 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9086 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9087 the one with exclusive clause(s), comes first. */
9088 if (stmt2
9089 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9090 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9092 gsi_remove (gsi_p, false);
9093 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
9094 ctx = maybe_lookup_ctx (stmt2);
9095 gcc_assert (ctx);
9096 lower_omp_scan (gsi_p, ctx);
9097 return;
9101 bool input_phase = has_clauses ^ octx->scan_inclusive;
9102 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9103 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
9104 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9105 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9106 && !gimple_omp_for_combined_p (octx->stmt));
9107 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9108 if (is_for_simd && octx->for_simd_scan_phase)
9109 is_simd = false;
9110 if (is_simd)
9111 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
9112 OMP_CLAUSE__SIMDUID_))
9114 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
9115 lane = create_tmp_var (unsigned_type_node);
9116 tree t = build_int_cst (integer_type_node,
9117 input_phase ? 1
9118 : octx->scan_inclusive ? 2 : 3);
9119 gimple *g
9120 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
9121 gimple_call_set_lhs (g, lane);
9122 gimple_seq_add_stmt (&before, g);
9125 if (is_simd || is_for)
9127 for (tree c = gimple_omp_for_clauses (octx->stmt);
9128 c; c = OMP_CLAUSE_CHAIN (c))
9129 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9130 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9132 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9133 tree var = OMP_CLAUSE_DECL (c);
9134 tree new_var = lookup_decl (var, octx);
9135 tree val = new_var;
9136 tree var2 = NULL_TREE;
9137 tree var3 = NULL_TREE;
9138 tree var4 = NULL_TREE;
9139 tree lane0 = NULL_TREE;
9140 tree new_vard = new_var;
9141 if (omp_is_reference (var))
9143 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9144 val = new_var;
9146 if (DECL_HAS_VALUE_EXPR_P (new_vard))
9148 val = DECL_VALUE_EXPR (new_vard);
9149 if (new_vard != new_var)
9151 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
9152 val = TREE_OPERAND (val, 0);
9154 if (TREE_CODE (val) == ARRAY_REF
9155 && VAR_P (TREE_OPERAND (val, 0)))
9157 tree v = TREE_OPERAND (val, 0);
9158 if (lookup_attribute ("omp simd array",
9159 DECL_ATTRIBUTES (v)))
9161 val = unshare_expr (val);
9162 lane0 = TREE_OPERAND (val, 1);
9163 TREE_OPERAND (val, 1) = lane;
9164 var2 = lookup_decl (v, octx);
9165 if (octx->scan_exclusive)
9166 var4 = lookup_decl (var2, octx);
9167 if (input_phase
9168 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9169 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
9170 if (!input_phase)
9172 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
9173 var2, lane, NULL_TREE, NULL_TREE);
9174 TREE_THIS_NOTRAP (var2) = 1;
9175 if (octx->scan_exclusive)
9177 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
9178 var4, lane, NULL_TREE,
9179 NULL_TREE);
9180 TREE_THIS_NOTRAP (var4) = 1;
9183 else
9184 var2 = val;
9187 gcc_assert (var2);
9189 else
9191 var2 = build_outer_var_ref (var, octx);
9192 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9194 var3 = maybe_lookup_decl (new_vard, octx);
9195 if (var3 == new_vard || var3 == NULL_TREE)
9196 var3 = NULL_TREE;
9197 else if (is_simd && octx->scan_exclusive && !input_phase)
9199 var4 = maybe_lookup_decl (var3, octx);
9200 if (var4 == var3 || var4 == NULL_TREE)
9202 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
9204 var4 = var3;
9205 var3 = NULL_TREE;
9207 else
9208 var4 = NULL_TREE;
9212 if (is_simd
9213 && octx->scan_exclusive
9214 && !input_phase
9215 && var4 == NULL_TREE)
9216 var4 = create_tmp_var (TREE_TYPE (val));
9218 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9220 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9221 if (input_phase)
9223 if (var3)
9225 /* If we've added a separate identity element
9226 variable, copy it over into val. */
9227 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
9228 var3);
9229 gimplify_and_add (x, &before);
9231 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9233 /* Otherwise, assign to it the identity element. */
9234 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9235 if (is_for)
9236 tseq = copy_gimple_seq_and_replace_locals (tseq);
9237 tree ref = build_outer_var_ref (var, octx);
9238 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9239 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9240 if (x)
9242 if (new_vard != new_var)
9243 val = build_fold_addr_expr_loc (clause_loc, val);
9244 SET_DECL_VALUE_EXPR (new_vard, val);
9246 SET_DECL_VALUE_EXPR (placeholder, ref);
9247 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9248 lower_omp (&tseq, octx);
9249 if (x)
9250 SET_DECL_VALUE_EXPR (new_vard, x);
9251 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9252 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9253 gimple_seq_add_seq (&before, tseq);
9254 if (is_simd)
9255 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9258 else if (is_simd)
9260 tree x;
9261 if (octx->scan_exclusive)
9263 tree v4 = unshare_expr (var4);
9264 tree v2 = unshare_expr (var2);
9265 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
9266 gimplify_and_add (x, &before);
9268 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9269 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9270 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9271 tree vexpr = val;
9272 if (x && new_vard != new_var)
9273 vexpr = build_fold_addr_expr_loc (clause_loc, val);
9274 if (x)
9275 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9276 SET_DECL_VALUE_EXPR (placeholder, var2);
9277 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9278 lower_omp (&tseq, octx);
9279 gimple_seq_add_seq (&before, tseq);
9280 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9281 if (x)
9282 SET_DECL_VALUE_EXPR (new_vard, x);
9283 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9284 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9285 if (octx->scan_inclusive)
9287 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9288 var2);
9289 gimplify_and_add (x, &before);
9291 else if (lane0 == NULL_TREE)
9293 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9294 var4);
9295 gimplify_and_add (x, &before);
9299 else
9301 if (input_phase)
9303 /* input phase. Set val to initializer before
9304 the body. */
9305 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
9306 gimplify_assign (val, x, &before);
9308 else if (is_simd)
9310 /* scan phase. */
9311 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9312 if (code == MINUS_EXPR)
9313 code = PLUS_EXPR;
9315 tree x = build2 (code, TREE_TYPE (var2),
9316 unshare_expr (var2), unshare_expr (val));
9317 if (octx->scan_inclusive)
9319 gimplify_assign (unshare_expr (var2), x, &before);
9320 gimplify_assign (val, var2, &before);
9322 else
9324 gimplify_assign (unshare_expr (var4),
9325 unshare_expr (var2), &before);
9326 gimplify_assign (var2, x, &before);
9327 if (lane0 == NULL_TREE)
9328 gimplify_assign (val, var4, &before);
9332 if (octx->scan_exclusive && !input_phase && lane0)
9334 tree vexpr = unshare_expr (var4);
9335 TREE_OPERAND (vexpr, 1) = lane0;
9336 if (new_vard != new_var)
9337 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
9338 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9342 if (is_simd && !is_for_simd)
9344 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
9345 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
9346 gsi_replace (gsi_p, gimple_build_nop (), true);
9347 return;
9349 lower_omp (gimple_omp_body_ptr (stmt), octx);
9350 if (before)
9352 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
9353 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
9358 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9359 substitution of a couple of function calls. But in the NAMED case,
9360 requires that languages coordinate a symbol name. It is therefore
9361 best put here in common code. */
9363 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9365 static void
9366 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9368 tree block;
9369 tree name, lock, unlock;
9370 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9371 gbind *bind;
9372 location_t loc = gimple_location (stmt);
9373 gimple_seq tbody;
9375 name = gimple_omp_critical_name (stmt);
9376 if (name)
9378 tree decl;
9380 if (!critical_name_mutexes)
9381 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
9383 tree *n = critical_name_mutexes->get (name);
9384 if (n == NULL)
9386 char *new_str;
9388 decl = create_tmp_var_raw (ptr_type_node);
9390 new_str = ACONCAT ((".gomp_critical_user_",
9391 IDENTIFIER_POINTER (name), NULL));
9392 DECL_NAME (decl) = get_identifier (new_str);
9393 TREE_PUBLIC (decl) = 1;
9394 TREE_STATIC (decl) = 1;
9395 DECL_COMMON (decl) = 1;
9396 DECL_ARTIFICIAL (decl) = 1;
9397 DECL_IGNORED_P (decl) = 1;
9399 varpool_node::finalize_decl (decl);
9401 critical_name_mutexes->put (name, decl);
9403 else
9404 decl = *n;
9406 /* If '#pragma omp critical' is inside offloaded region or
9407 inside function marked as offloadable, the symbol must be
9408 marked as offloadable too. */
9409 omp_context *octx;
9410 if (cgraph_node::get (current_function_decl)->offloadable)
9411 varpool_node::get_create (decl)->offloadable = 1;
9412 else
9413 for (octx = ctx->outer; octx; octx = octx->outer)
9414 if (is_gimple_omp_offloaded (octx->stmt))
9416 varpool_node::get_create (decl)->offloadable = 1;
9417 break;
9420 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
9421 lock = build_call_expr_loc (loc, lock, 1,
9422 build_fold_addr_expr_loc (loc, decl));
9424 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
9425 unlock = build_call_expr_loc (loc, unlock, 1,
9426 build_fold_addr_expr_loc (loc, decl));
9428 else
9430 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
9431 lock = build_call_expr_loc (loc, lock, 0);
9433 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
9434 unlock = build_call_expr_loc (loc, unlock, 0);
9437 push_gimplify_context ();
9439 block = make_node (BLOCK);
9440 bind = gimple_build_bind (NULL, NULL, block);
9441 gsi_replace (gsi_p, bind, true);
9442 gimple_bind_add_stmt (bind, stmt);
9444 tbody = gimple_bind_body (bind);
9445 gimplify_and_add (lock, &tbody);
9446 gimple_bind_set_body (bind, tbody);
9448 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9449 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9450 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9451 gimple_omp_set_body (stmt, NULL);
9453 tbody = gimple_bind_body (bind);
9454 gimplify_and_add (unlock, &tbody);
9455 gimple_bind_set_body (bind, tbody);
9457 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9459 pop_gimplify_context (bind);
9460 gimple_bind_append_vars (bind, ctx->block_vars);
9461 BLOCK_VARS (block) = gimple_bind_vars (bind);
9464 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9465 for a lastprivate clause. Given a loop control predicate of (V
9466 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9467 is appended to *DLIST, iterator initialization is appended to
9468 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9469 to be emitted in a critical section. */
9471 static void
9472 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9473 gimple_seq *dlist, gimple_seq *clist,
9474 struct omp_context *ctx)
9476 tree clauses, cond, vinit;
9477 enum tree_code cond_code;
9478 gimple_seq stmts;
9480 cond_code = fd->loop.cond_code;
9481 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
9483 /* When possible, use a strict equality expression. This can let VRP
9484 type optimizations deduce the value and remove a copy. */
9485 if (tree_fits_shwi_p (fd->loop.step))
9487 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9488 if (step == 1 || step == -1)
9489 cond_code = EQ_EXPR;
9492 tree n2 = fd->loop.n2;
9493 if (fd->collapse > 1
9494 && TREE_CODE (n2) != INTEGER_CST
9495 && gimple_omp_for_combined_into_p (fd->for_stmt))
9497 struct omp_context *taskreg_ctx = NULL;
9498 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
9500 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9501 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9502 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
9504 if (gimple_omp_for_combined_into_p (gfor))
9506 gcc_assert (ctx->outer->outer
9507 && is_parallel_ctx (ctx->outer->outer));
9508 taskreg_ctx = ctx->outer->outer;
9510 else
9512 struct omp_for_data outer_fd;
9513 omp_extract_for_data (gfor, &outer_fd, NULL);
9514 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9517 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9518 taskreg_ctx = ctx->outer->outer;
9520 else if (is_taskreg_ctx (ctx->outer))
9521 taskreg_ctx = ctx->outer;
9522 if (taskreg_ctx)
9524 int i;
9525 tree taskreg_clauses
9526 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9527 tree innerc = omp_find_clause (taskreg_clauses,
9528 OMP_CLAUSE__LOOPTEMP_);
9529 gcc_assert (innerc);
9530 for (i = 0; i < fd->collapse; i++)
9532 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9533 OMP_CLAUSE__LOOPTEMP_);
9534 gcc_assert (innerc);
9536 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9537 OMP_CLAUSE__LOOPTEMP_);
9538 if (innerc)
9539 n2 = fold_convert (TREE_TYPE (n2),
9540 lookup_decl (OMP_CLAUSE_DECL (innerc),
9541 taskreg_ctx));
9544 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
9546 clauses = gimple_omp_for_clauses (fd->for_stmt);
9547 stmts = NULL;
9548 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
9549 if (!gimple_seq_empty_p (stmts))
9551 gimple_seq_add_seq (&stmts, *dlist);
9552 *dlist = stmts;
9554 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9555 vinit = fd->loop.n1;
9556 if (cond_code == EQ_EXPR
9557 && tree_fits_shwi_p (fd->loop.n2)
9558 && ! integer_zerop (fd->loop.n2))
9559 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9560 else
9561 vinit = unshare_expr (vinit);
9563 /* Initialize the iterator variable, so that threads that don't execute
9564 any iterations don't execute the lastprivate clauses by accident. */
9565 gimplify_assign (fd->loop.v, vinit, body_p);
9569 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9571 static tree
9572 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9573 struct walk_stmt_info *wi)
9575 gimple *stmt = gsi_stmt (*gsi_p);
9577 *handled_ops_p = true;
9578 switch (gimple_code (stmt))
9580 WALK_SUBSTMTS;
9582 case GIMPLE_OMP_FOR:
9583 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
9584 && gimple_omp_for_combined_into_p (stmt))
9585 *handled_ops_p = false;
9586 break;
9588 case GIMPLE_OMP_SCAN:
9589 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9590 return integer_zero_node;
9591 default:
9592 break;
9594 return NULL;
9597 /* Helper function for lower_omp_for, add transformations for a worksharing
9598 loop with scan directives inside of it.
9599 For worksharing loop not combined with simd, transform:
9600 #pragma omp for reduction(inscan,+:r) private(i)
9601 for (i = 0; i < n; i = i + 1)
9604 update (r);
9606 #pragma omp scan inclusive(r)
9608 use (r);
9612 into two worksharing loops + code to merge results:
9614 num_threads = omp_get_num_threads ();
9615 thread_num = omp_get_thread_num ();
9616 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9617 <D.2099>:
9618 var2 = r;
9619 goto <D.2101>;
9620 <D.2100>:
9621 // For UDRs this is UDR init, or if ctors are needed, copy from
9622 // var3 that has been constructed to contain the neutral element.
9623 var2 = 0;
9624 <D.2101>:
9625 ivar = 0;
9626 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9627 // a shared array with num_threads elements and rprivb to a local array
9628 // number of elements equal to the number of (contiguous) iterations the
9629 // current thread will perform. controlb and controlp variables are
9630 // temporaries to handle deallocation of rprivb at the end of second
9631 // GOMP_FOR.
9632 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9633 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9634 for (i = 0; i < n; i = i + 1)
9637 // For UDRs this is UDR init or copy from var3.
9638 r = 0;
9639 // This is the input phase from user code.
9640 update (r);
9643 // For UDRs this is UDR merge.
9644 var2 = var2 + r;
9645 // Rather than handing it over to the user, save to local thread's
9646 // array.
9647 rprivb[ivar] = var2;
9648 // For exclusive scan, the above two statements are swapped.
9649 ivar = ivar + 1;
9652 // And remember the final value from this thread's into the shared
9653 // rpriva array.
9654 rpriva[(sizetype) thread_num] = var2;
9655 // If more than one thread, compute using Work-Efficient prefix sum
9656 // the inclusive parallel scan of the rpriva array.
9657 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9658 <D.2102>:
9659 GOMP_barrier ();
9660 down = 0;
9661 k = 1;
9662 num_threadsu = (unsigned int) num_threads;
9663 thread_numup1 = (unsigned int) thread_num + 1;
9664 <D.2108>:
9665 twok = k << 1;
9666 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9667 <D.2110>:
9668 down = 4294967295;
9669 k = k >> 1;
9670 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9671 <D.2112>:
9672 k = k >> 1;
9673 <D.2111>:
9674 twok = k << 1;
9675 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9676 mul = REALPART_EXPR <cplx>;
9677 ovf = IMAGPART_EXPR <cplx>;
9678 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9679 <D.2116>:
9680 andv = k & down;
9681 andvm1 = andv + 4294967295;
9682 l = mul + andvm1;
9683 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9684 <D.2120>:
9685 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9686 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9687 rpriva[l] = rpriva[l - k] + rpriva[l];
9688 <D.2117>:
9689 if (down == 0) goto <D.2121>; else goto <D.2122>;
9690 <D.2121>:
9691 k = k << 1;
9692 goto <D.2123>;
9693 <D.2122>:
9694 k = k >> 1;
9695 <D.2123>:
9696 GOMP_barrier ();
9697 if (k != 0) goto <D.2108>; else goto <D.2103>;
9698 <D.2103>:
9699 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9700 <D.2124>:
9701 // For UDRs this is UDR init or copy from var3.
9702 var2 = 0;
9703 goto <D.2126>;
9704 <D.2125>:
9705 var2 = rpriva[thread_num - 1];
9706 <D.2126>:
9707 ivar = 0;
9708 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9709 reduction(inscan,+:r) private(i)
9710 for (i = 0; i < n; i = i + 1)
9713 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9714 r = var2 + rprivb[ivar];
9717 // This is the scan phase from user code.
9718 use (r);
9719 // Plus a bump of the iterator.
9720 ivar = ivar + 1;
9722 } */
9724 static void
9725 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
9726 struct omp_for_data *fd, omp_context *ctx)
9728 bool is_for_simd = gimple_omp_for_combined_p (stmt);
9729 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
9731 gimple_seq body = gimple_omp_body (stmt);
9732 gimple_stmt_iterator input1_gsi = gsi_none ();
9733 struct walk_stmt_info wi;
9734 memset (&wi, 0, sizeof (wi));
9735 wi.val_only = true;
9736 wi.info = (void *) &input1_gsi;
9737 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
9738 gcc_assert (!gsi_end_p (input1_gsi));
9740 gimple *input_stmt1 = gsi_stmt (input1_gsi);
9741 gimple_stmt_iterator gsi = input1_gsi;
9742 gsi_next (&gsi);
9743 gimple_stmt_iterator scan1_gsi = gsi;
9744 gimple *scan_stmt1 = gsi_stmt (gsi);
9745 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
9747 gimple_seq input_body = gimple_omp_body (input_stmt1);
9748 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
9749 gimple_omp_set_body (input_stmt1, NULL);
9750 gimple_omp_set_body (scan_stmt1, NULL);
9751 gimple_omp_set_body (stmt, NULL);
9753 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
9754 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
9755 gimple_omp_set_body (stmt, body);
9756 gimple_omp_set_body (input_stmt1, input_body);
9758 gimple_stmt_iterator input2_gsi = gsi_none ();
9759 memset (&wi, 0, sizeof (wi));
9760 wi.val_only = true;
9761 wi.info = (void *) &input2_gsi;
9762 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
9763 gcc_assert (!gsi_end_p (input2_gsi));
9765 gimple *input_stmt2 = gsi_stmt (input2_gsi);
9766 gsi = input2_gsi;
9767 gsi_next (&gsi);
9768 gimple_stmt_iterator scan2_gsi = gsi;
9769 gimple *scan_stmt2 = gsi_stmt (gsi);
9770 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
9771 gimple_omp_set_body (scan_stmt2, scan_body);
9773 gimple_stmt_iterator input3_gsi = gsi_none ();
9774 gimple_stmt_iterator scan3_gsi = gsi_none ();
9775 gimple_stmt_iterator input4_gsi = gsi_none ();
9776 gimple_stmt_iterator scan4_gsi = gsi_none ();
9777 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
9778 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
9779 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
9780 if (is_for_simd)
9782 memset (&wi, 0, sizeof (wi));
9783 wi.val_only = true;
9784 wi.info = (void *) &input3_gsi;
9785 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
9786 gcc_assert (!gsi_end_p (input3_gsi));
9788 input_stmt3 = gsi_stmt (input3_gsi);
9789 gsi = input3_gsi;
9790 gsi_next (&gsi);
9791 scan3_gsi = gsi;
9792 scan_stmt3 = gsi_stmt (gsi);
9793 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
9795 memset (&wi, 0, sizeof (wi));
9796 wi.val_only = true;
9797 wi.info = (void *) &input4_gsi;
9798 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
9799 gcc_assert (!gsi_end_p (input4_gsi));
9801 input_stmt4 = gsi_stmt (input4_gsi);
9802 gsi = input4_gsi;
9803 gsi_next (&gsi);
9804 scan4_gsi = gsi;
9805 scan_stmt4 = gsi_stmt (gsi);
9806 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
9808 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
9809 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
9812 tree num_threads = create_tmp_var (integer_type_node);
9813 tree thread_num = create_tmp_var (integer_type_node);
9814 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9815 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9816 gimple *g = gimple_build_call (nthreads_decl, 0);
9817 gimple_call_set_lhs (g, num_threads);
9818 gimple_seq_add_stmt (body_p, g);
9819 g = gimple_build_call (threadnum_decl, 0);
9820 gimple_call_set_lhs (g, thread_num);
9821 gimple_seq_add_stmt (body_p, g);
9823 tree ivar = create_tmp_var (sizetype);
9824 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
9825 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
9826 tree k = create_tmp_var (unsigned_type_node);
9827 tree l = create_tmp_var (unsigned_type_node);
9829 gimple_seq clist = NULL, mdlist = NULL;
9830 gimple_seq thr01_list = NULL, thrn1_list = NULL;
9831 gimple_seq thr02_list = NULL, thrn2_list = NULL;
9832 gimple_seq scan1_list = NULL, input2_list = NULL;
9833 gimple_seq last_list = NULL, reduc_list = NULL;
9834 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9835 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9836 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9838 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9839 tree var = OMP_CLAUSE_DECL (c);
9840 tree new_var = lookup_decl (var, ctx);
9841 tree var3 = NULL_TREE;
9842 tree new_vard = new_var;
9843 if (omp_is_reference (var))
9844 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9845 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9847 var3 = maybe_lookup_decl (new_vard, ctx);
9848 if (var3 == new_vard)
9849 var3 = NULL_TREE;
9852 tree ptype = build_pointer_type (TREE_TYPE (new_var));
9853 tree rpriva = create_tmp_var (ptype);
9854 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9855 OMP_CLAUSE_DECL (nc) = rpriva;
9856 *cp1 = nc;
9857 cp1 = &OMP_CLAUSE_CHAIN (nc);
9859 tree rprivb = create_tmp_var (ptype);
9860 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9861 OMP_CLAUSE_DECL (nc) = rprivb;
9862 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
9863 *cp1 = nc;
9864 cp1 = &OMP_CLAUSE_CHAIN (nc);
9866 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
9867 if (new_vard != new_var)
9868 TREE_ADDRESSABLE (var2) = 1;
9869 gimple_add_tmp_var (var2);
9871 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
9872 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9873 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9874 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9875 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
9877 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
9878 thread_num, integer_minus_one_node);
9879 x = fold_convert_loc (clause_loc, sizetype, x);
9880 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9881 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9882 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9883 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
9885 x = fold_convert_loc (clause_loc, sizetype, l);
9886 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9887 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9888 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9889 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
9891 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
9892 x = fold_convert_loc (clause_loc, sizetype, x);
9893 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9894 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9895 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9896 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
9898 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
9899 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9900 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
9901 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
9903 tree var4 = is_for_simd ? new_var : var2;
9904 tree var5 = NULL_TREE, var6 = NULL_TREE;
9905 if (is_for_simd)
9907 var5 = lookup_decl (var, input_simd_ctx);
9908 var6 = lookup_decl (var, scan_simd_ctx);
9909 if (new_vard != new_var)
9911 var5 = build_simple_mem_ref_loc (clause_loc, var5);
9912 var6 = build_simple_mem_ref_loc (clause_loc, var6);
9915 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9917 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9918 tree val = var2;
9920 x = lang_hooks.decls.omp_clause_default_ctor
9921 (c, var2, build_outer_var_ref (var, ctx));
9922 if (x)
9923 gimplify_and_add (x, &clist);
9925 x = build_outer_var_ref (var, ctx);
9926 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
9928 gimplify_and_add (x, &thr01_list);
9930 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
9931 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9932 if (var3)
9934 x = unshare_expr (var4);
9935 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9936 gimplify_and_add (x, &thrn1_list);
9937 x = unshare_expr (var4);
9938 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9939 gimplify_and_add (x, &thr02_list);
9941 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9943 /* Otherwise, assign to it the identity element. */
9944 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9945 tseq = copy_gimple_seq_and_replace_locals (tseq);
9946 if (!is_for_simd)
9948 if (new_vard != new_var)
9949 val = build_fold_addr_expr_loc (clause_loc, val);
9950 SET_DECL_VALUE_EXPR (new_vard, val);
9951 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9953 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
9954 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9955 lower_omp (&tseq, ctx);
9956 gimple_seq_add_seq (&thrn1_list, tseq);
9957 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9958 lower_omp (&tseq, ctx);
9959 gimple_seq_add_seq (&thr02_list, tseq);
9960 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9961 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9962 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9963 if (y)
9964 SET_DECL_VALUE_EXPR (new_vard, y);
9965 else
9967 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9968 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9972 x = unshare_expr (var4);
9973 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
9974 gimplify_and_add (x, &thrn2_list);
9976 if (is_for_simd)
9978 x = unshare_expr (rprivb_ref);
9979 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
9980 gimplify_and_add (x, &scan1_list);
9982 else
9984 if (ctx->scan_exclusive)
9986 x = unshare_expr (rprivb_ref);
9987 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9988 gimplify_and_add (x, &scan1_list);
9991 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9992 tseq = copy_gimple_seq_and_replace_locals (tseq);
9993 SET_DECL_VALUE_EXPR (placeholder, var2);
9994 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9995 lower_omp (&tseq, ctx);
9996 gimple_seq_add_seq (&scan1_list, tseq);
9998 if (ctx->scan_inclusive)
10000 x = unshare_expr (rprivb_ref);
10001 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10002 gimplify_and_add (x, &scan1_list);
10006 x = unshare_expr (rpriva_ref);
10007 x = lang_hooks.decls.omp_clause_assign_op (c, x,
10008 unshare_expr (var4));
10009 gimplify_and_add (x, &mdlist);
10011 x = unshare_expr (is_for_simd ? var6 : new_var);
10012 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
10013 gimplify_and_add (x, &input2_list);
10015 val = rprivb_ref;
10016 if (new_vard != new_var)
10017 val = build_fold_addr_expr_loc (clause_loc, val);
10019 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10020 tseq = copy_gimple_seq_and_replace_locals (tseq);
10021 SET_DECL_VALUE_EXPR (new_vard, val);
10022 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10023 if (is_for_simd)
10025 SET_DECL_VALUE_EXPR (placeholder, var6);
10026 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10028 else
10029 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10030 lower_omp (&tseq, ctx);
10031 if (y)
10032 SET_DECL_VALUE_EXPR (new_vard, y);
10033 else
10035 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10036 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10038 if (!is_for_simd)
10040 SET_DECL_VALUE_EXPR (placeholder, new_var);
10041 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10042 lower_omp (&tseq, ctx);
10044 gimple_seq_add_seq (&input2_list, tseq);
10046 x = build_outer_var_ref (var, ctx);
10047 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
10048 gimplify_and_add (x, &last_list);
10050 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
10051 gimplify_and_add (x, &reduc_list);
10052 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10053 tseq = copy_gimple_seq_and_replace_locals (tseq);
10054 val = rprival_ref;
10055 if (new_vard != new_var)
10056 val = build_fold_addr_expr_loc (clause_loc, val);
10057 SET_DECL_VALUE_EXPR (new_vard, val);
10058 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10059 SET_DECL_VALUE_EXPR (placeholder, var2);
10060 lower_omp (&tseq, ctx);
10061 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10062 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10063 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10064 if (y)
10065 SET_DECL_VALUE_EXPR (new_vard, y);
10066 else
10068 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10069 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10071 gimple_seq_add_seq (&reduc_list, tseq);
10072 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
10073 gimplify_and_add (x, &reduc_list);
10075 x = lang_hooks.decls.omp_clause_dtor (c, var2);
10076 if (x)
10077 gimplify_and_add (x, dlist);
10079 else
10081 x = build_outer_var_ref (var, ctx);
10082 gimplify_assign (unshare_expr (var4), x, &thr01_list);
10084 x = omp_reduction_init (c, TREE_TYPE (new_var));
10085 gimplify_assign (unshare_expr (var4), unshare_expr (x),
10086 &thrn1_list);
10087 gimplify_assign (unshare_expr (var4), x, &thr02_list);
10089 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
10091 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10092 if (code == MINUS_EXPR)
10093 code = PLUS_EXPR;
10095 if (is_for_simd)
10096 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
10097 else
10099 if (ctx->scan_exclusive)
10100 gimplify_assign (unshare_expr (rprivb_ref), var2,
10101 &scan1_list);
10102 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10103 gimplify_assign (var2, x, &scan1_list);
10104 if (ctx->scan_inclusive)
10105 gimplify_assign (unshare_expr (rprivb_ref), var2,
10106 &scan1_list);
10109 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10110 &mdlist);
10112 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10113 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
10115 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10116 &last_list);
10118 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10119 unshare_expr (rprival_ref));
10120 gimplify_assign (rprival_ref, x, &reduc_list);
10124 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10125 gimple_seq_add_stmt (&scan1_list, g);
10126 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10127 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10128 ? scan_stmt4 : scan_stmt2), g);
10130 tree controlb = create_tmp_var (boolean_type_node);
10131 tree controlp = create_tmp_var (ptr_type_node);
10132 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10133 OMP_CLAUSE_DECL (nc) = controlb;
10134 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10135 *cp1 = nc;
10136 cp1 = &OMP_CLAUSE_CHAIN (nc);
10137 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10138 OMP_CLAUSE_DECL (nc) = controlp;
10139 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10140 *cp1 = nc;
10141 cp1 = &OMP_CLAUSE_CHAIN (nc);
10142 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10143 OMP_CLAUSE_DECL (nc) = controlb;
10144 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10145 *cp2 = nc;
10146 cp2 = &OMP_CLAUSE_CHAIN (nc);
10147 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10148 OMP_CLAUSE_DECL (nc) = controlp;
10149 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10150 *cp2 = nc;
10151 cp2 = &OMP_CLAUSE_CHAIN (nc);
10153 *cp1 = gimple_omp_for_clauses (stmt);
10154 gimple_omp_for_set_clauses (stmt, new_clauses1);
10155 *cp2 = gimple_omp_for_clauses (new_stmt);
10156 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10158 if (is_for_simd)
10160 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10161 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10163 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10164 GSI_SAME_STMT);
10165 gsi_remove (&input3_gsi, true);
10166 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10167 GSI_SAME_STMT);
10168 gsi_remove (&scan3_gsi, true);
10169 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10170 GSI_SAME_STMT);
10171 gsi_remove (&input4_gsi, true);
10172 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10173 GSI_SAME_STMT);
10174 gsi_remove (&scan4_gsi, true);
10176 else
10178 gimple_omp_set_body (scan_stmt1, scan1_list);
10179 gimple_omp_set_body (input_stmt2, input2_list);
10182 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10183 GSI_SAME_STMT);
10184 gsi_remove (&input1_gsi, true);
10185 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10186 GSI_SAME_STMT);
10187 gsi_remove (&scan1_gsi, true);
10188 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10189 GSI_SAME_STMT);
10190 gsi_remove (&input2_gsi, true);
10191 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10192 GSI_SAME_STMT);
10193 gsi_remove (&scan2_gsi, true);
10195 gimple_seq_add_seq (body_p, clist);
10197 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10198 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10199 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10200 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10201 gimple_seq_add_stmt (body_p, g);
10202 g = gimple_build_label (lab1);
10203 gimple_seq_add_stmt (body_p, g);
10204 gimple_seq_add_seq (body_p, thr01_list);
10205 g = gimple_build_goto (lab3);
10206 gimple_seq_add_stmt (body_p, g);
10207 g = gimple_build_label (lab2);
10208 gimple_seq_add_stmt (body_p, g);
10209 gimple_seq_add_seq (body_p, thrn1_list);
10210 g = gimple_build_label (lab3);
10211 gimple_seq_add_stmt (body_p, g);
10213 g = gimple_build_assign (ivar, size_zero_node);
10214 gimple_seq_add_stmt (body_p, g);
10216 gimple_seq_add_stmt (body_p, stmt);
10217 gimple_seq_add_seq (body_p, body);
10218 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10219 fd->loop.v));
10221 g = gimple_build_omp_return (true);
10222 gimple_seq_add_stmt (body_p, g);
10223 gimple_seq_add_seq (body_p, mdlist);
10225 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10226 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10227 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10228 gimple_seq_add_stmt (body_p, g);
10229 g = gimple_build_label (lab1);
10230 gimple_seq_add_stmt (body_p, g);
10232 g = omp_build_barrier (NULL);
10233 gimple_seq_add_stmt (body_p, g);
10235 tree down = create_tmp_var (unsigned_type_node);
10236 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10237 gimple_seq_add_stmt (body_p, g);
10239 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10240 gimple_seq_add_stmt (body_p, g);
10242 tree num_threadsu = create_tmp_var (unsigned_type_node);
10243 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10244 gimple_seq_add_stmt (body_p, g);
10246 tree thread_numu = create_tmp_var (unsigned_type_node);
10247 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10248 gimple_seq_add_stmt (body_p, g);
10250 tree thread_nump1 = create_tmp_var (unsigned_type_node);
10251 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10252 build_int_cst (unsigned_type_node, 1));
10253 gimple_seq_add_stmt (body_p, g);
10255 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10256 g = gimple_build_label (lab3);
10257 gimple_seq_add_stmt (body_p, g);
10259 tree twok = create_tmp_var (unsigned_type_node);
10260 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10261 gimple_seq_add_stmt (body_p, g);
10263 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
10264 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
10265 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
10266 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
10267 gimple_seq_add_stmt (body_p, g);
10268 g = gimple_build_label (lab4);
10269 gimple_seq_add_stmt (body_p, g);
10270 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
10271 gimple_seq_add_stmt (body_p, g);
10272 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10273 gimple_seq_add_stmt (body_p, g);
10275 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
10276 gimple_seq_add_stmt (body_p, g);
10277 g = gimple_build_label (lab6);
10278 gimple_seq_add_stmt (body_p, g);
10280 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10281 gimple_seq_add_stmt (body_p, g);
10283 g = gimple_build_label (lab5);
10284 gimple_seq_add_stmt (body_p, g);
10286 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10287 gimple_seq_add_stmt (body_p, g);
10289 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
10290 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
10291 gimple_call_set_lhs (g, cplx);
10292 gimple_seq_add_stmt (body_p, g);
10293 tree mul = create_tmp_var (unsigned_type_node);
10294 g = gimple_build_assign (mul, REALPART_EXPR,
10295 build1 (REALPART_EXPR, unsigned_type_node, cplx));
10296 gimple_seq_add_stmt (body_p, g);
10297 tree ovf = create_tmp_var (unsigned_type_node);
10298 g = gimple_build_assign (ovf, IMAGPART_EXPR,
10299 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
10300 gimple_seq_add_stmt (body_p, g);
10302 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
10303 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
10304 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
10305 lab7, lab8);
10306 gimple_seq_add_stmt (body_p, g);
10307 g = gimple_build_label (lab7);
10308 gimple_seq_add_stmt (body_p, g);
10310 tree andv = create_tmp_var (unsigned_type_node);
10311 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
10312 gimple_seq_add_stmt (body_p, g);
10313 tree andvm1 = create_tmp_var (unsigned_type_node);
10314 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
10315 build_minus_one_cst (unsigned_type_node));
10316 gimple_seq_add_stmt (body_p, g);
10318 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
10319 gimple_seq_add_stmt (body_p, g);
10321 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
10322 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
10323 gimple_seq_add_stmt (body_p, g);
10324 g = gimple_build_label (lab9);
10325 gimple_seq_add_stmt (body_p, g);
10326 gimple_seq_add_seq (body_p, reduc_list);
10327 g = gimple_build_label (lab8);
10328 gimple_seq_add_stmt (body_p, g);
10330 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10331 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10332 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10333 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10334 lab10, lab11);
10335 gimple_seq_add_stmt (body_p, g);
10336 g = gimple_build_label (lab10);
10337 gimple_seq_add_stmt (body_p, g);
10338 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10339 gimple_seq_add_stmt (body_p, g);
10340 g = gimple_build_goto (lab12);
10341 gimple_seq_add_stmt (body_p, g);
10342 g = gimple_build_label (lab11);
10343 gimple_seq_add_stmt (body_p, g);
10344 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10345 gimple_seq_add_stmt (body_p, g);
10346 g = gimple_build_label (lab12);
10347 gimple_seq_add_stmt (body_p, g);
10349 g = omp_build_barrier (NULL);
10350 gimple_seq_add_stmt (body_p, g);
10352 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10353 lab3, lab2);
10354 gimple_seq_add_stmt (body_p, g);
10356 g = gimple_build_label (lab2);
10357 gimple_seq_add_stmt (body_p, g);
10359 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10360 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10361 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10362 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10363 gimple_seq_add_stmt (body_p, g);
10364 g = gimple_build_label (lab1);
10365 gimple_seq_add_stmt (body_p, g);
10366 gimple_seq_add_seq (body_p, thr02_list);
10367 g = gimple_build_goto (lab3);
10368 gimple_seq_add_stmt (body_p, g);
10369 g = gimple_build_label (lab2);
10370 gimple_seq_add_stmt (body_p, g);
10371 gimple_seq_add_seq (body_p, thrn2_list);
10372 g = gimple_build_label (lab3);
10373 gimple_seq_add_stmt (body_p, g);
10375 g = gimple_build_assign (ivar, size_zero_node);
10376 gimple_seq_add_stmt (body_p, g);
10377 gimple_seq_add_stmt (body_p, new_stmt);
10378 gimple_seq_add_seq (body_p, new_body);
10380 gimple_seq new_dlist = NULL;
10381 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10382 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10383 tree num_threadsm1 = create_tmp_var (integer_type_node);
10384 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10385 integer_minus_one_node);
10386 gimple_seq_add_stmt (&new_dlist, g);
10387 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10388 gimple_seq_add_stmt (&new_dlist, g);
10389 g = gimple_build_label (lab1);
10390 gimple_seq_add_stmt (&new_dlist, g);
10391 gimple_seq_add_seq (&new_dlist, last_list);
10392 g = gimple_build_label (lab2);
10393 gimple_seq_add_stmt (&new_dlist, g);
10394 gimple_seq_add_seq (&new_dlist, *dlist);
10395 *dlist = new_dlist;
10398 /* Lower code for an OMP loop directive. */
10400 static void
10401 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10403 tree *rhs_p, block;
10404 struct omp_for_data fd, *fdp = NULL;
10405 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
10406 gbind *new_stmt;
10407 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
10408 gimple_seq cnt_list = NULL, clist = NULL;
10409 gimple_seq oacc_head = NULL, oacc_tail = NULL;
10410 size_t i;
10412 push_gimplify_context ();
10414 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
10416 block = make_node (BLOCK);
10417 new_stmt = gimple_build_bind (NULL, NULL, block);
10418 /* Replace at gsi right away, so that 'stmt' is no member
10419 of a sequence anymore as we're going to add to a different
10420 one below. */
10421 gsi_replace (gsi_p, new_stmt, true);
10423 /* Move declaration of temporaries in the loop body before we make
10424 it go away. */
10425 omp_for_body = gimple_omp_body (stmt);
10426 if (!gimple_seq_empty_p (omp_for_body)
10427 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
10429 gbind *inner_bind
10430 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
10431 tree vars = gimple_bind_vars (inner_bind);
10432 gimple_bind_append_vars (new_stmt, vars);
10433 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
10434 keep them on the inner_bind and it's block. */
10435 gimple_bind_set_vars (inner_bind, NULL_TREE);
10436 if (gimple_bind_block (inner_bind))
10437 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
10440 if (gimple_omp_for_combined_into_p (stmt))
10442 omp_extract_for_data (stmt, &fd, NULL);
10443 fdp = &fd;
10445 /* We need two temporaries with fd.loop.v type (istart/iend)
10446 and then (fd.collapse - 1) temporaries with the same
10447 type for count2 ... countN-1 vars if not constant. */
10448 size_t count = 2;
10449 tree type = fd.iter_type;
10450 if (fd.collapse > 1
10451 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10452 count += fd.collapse - 1;
10453 bool taskreg_for
10454 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
10455 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
10456 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
10457 tree simtc = NULL;
10458 tree clauses = *pc;
10459 if (taskreg_for)
10460 outerc
10461 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
10462 OMP_CLAUSE__LOOPTEMP_);
10463 if (ctx->simt_stmt)
10464 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
10465 OMP_CLAUSE__LOOPTEMP_);
10466 for (i = 0; i < count; i++)
10468 tree temp;
10469 if (taskreg_for)
10471 gcc_assert (outerc);
10472 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
10473 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
10474 OMP_CLAUSE__LOOPTEMP_);
10476 else
10478 /* If there are 2 adjacent SIMD stmts, one with _simt_
10479 clause, another without, make sure they have the same
10480 decls in _looptemp_ clauses, because the outer stmt
10481 they are combined into will look up just one inner_stmt. */
10482 if (ctx->simt_stmt)
10483 temp = OMP_CLAUSE_DECL (simtc);
10484 else
10485 temp = create_tmp_var (type);
10486 insert_decl_map (&ctx->outer->cb, temp, temp);
10488 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
10489 OMP_CLAUSE_DECL (*pc) = temp;
10490 pc = &OMP_CLAUSE_CHAIN (*pc);
10491 if (ctx->simt_stmt)
10492 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
10493 OMP_CLAUSE__LOOPTEMP_);
10495 *pc = clauses;
10498 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10499 dlist = NULL;
10500 body = NULL;
10501 tree rclauses
10502 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
10503 OMP_CLAUSE_REDUCTION);
10504 tree rtmp = NULL_TREE;
10505 if (rclauses)
10507 tree type = build_pointer_type (pointer_sized_int_node);
10508 tree temp = create_tmp_var (type);
10509 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
10510 OMP_CLAUSE_DECL (c) = temp;
10511 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
10512 gimple_omp_for_set_clauses (stmt, c);
10513 lower_omp_task_reductions (ctx, OMP_FOR,
10514 gimple_omp_for_clauses (stmt),
10515 &tred_ilist, &tred_dlist);
10516 rclauses = c;
10517 rtmp = make_ssa_name (type);
10518 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
10521 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
10522 ctx);
10524 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
10525 fdp);
10526 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
10527 gimple_omp_for_pre_body (stmt));
10529 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10531 /* Lower the header expressions. At this point, we can assume that
10532 the header is of the form:
10534 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10536 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10537 using the .omp_data_s mapping, if needed. */
10538 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
10540 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
10541 if (TREE_CODE (*rhs_p) == TREE_VEC)
10543 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
10544 TREE_VEC_ELT (*rhs_p, 1)
10545 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
10546 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
10547 TREE_VEC_ELT (*rhs_p, 2)
10548 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
10550 else if (!is_gimple_min_invariant (*rhs_p))
10551 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10552 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10553 recompute_tree_invariant_for_addr_expr (*rhs_p);
10555 rhs_p = gimple_omp_for_final_ptr (stmt, i);
10556 if (TREE_CODE (*rhs_p) == TREE_VEC)
10558 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
10559 TREE_VEC_ELT (*rhs_p, 1)
10560 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
10561 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
10562 TREE_VEC_ELT (*rhs_p, 2)
10563 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
10565 else if (!is_gimple_min_invariant (*rhs_p))
10566 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10567 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10568 recompute_tree_invariant_for_addr_expr (*rhs_p);
10570 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
10571 if (!is_gimple_min_invariant (*rhs_p))
10572 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10574 if (rclauses)
10575 gimple_seq_add_seq (&tred_ilist, cnt_list);
10576 else
10577 gimple_seq_add_seq (&body, cnt_list);
10579 /* Once lowered, extract the bounds and clauses. */
10580 omp_extract_for_data (stmt, &fd, NULL);
10582 if (is_gimple_omp_oacc (ctx->stmt)
10583 && !ctx_in_oacc_kernels_region (ctx))
10584 lower_oacc_head_tail (gimple_location (stmt),
10585 gimple_omp_for_clauses (stmt),
10586 &oacc_head, &oacc_tail, ctx);
10588 /* Add OpenACC partitioning and reduction markers just before the loop. */
10589 if (oacc_head)
10590 gimple_seq_add_seq (&body, oacc_head);
10592 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
10594 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10595 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10596 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10597 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10599 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
10600 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
10601 OMP_CLAUSE_LINEAR_STEP (c)
10602 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
10603 ctx);
10606 if ((ctx->scan_inclusive || ctx->scan_exclusive)
10607 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10608 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
10609 else
10611 gimple_seq_add_stmt (&body, stmt);
10612 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
10615 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
10616 fd.loop.v));
10618 /* After the loop, add exit clauses. */
10619 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
10621 if (clist)
10623 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
10624 gcall *g = gimple_build_call (fndecl, 0);
10625 gimple_seq_add_stmt (&body, g);
10626 gimple_seq_add_seq (&body, clist);
10627 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
10628 g = gimple_build_call (fndecl, 0);
10629 gimple_seq_add_stmt (&body, g);
10632 if (ctx->cancellable)
10633 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
10635 gimple_seq_add_seq (&body, dlist);
10637 if (rclauses)
10639 gimple_seq_add_seq (&tred_ilist, body);
10640 body = tred_ilist;
10643 body = maybe_catch_exception (body);
10645 /* Region exit marker goes at the end of the loop body. */
10646 gimple *g = gimple_build_omp_return (fd.have_nowait);
10647 gimple_seq_add_stmt (&body, g);
10649 gimple_seq_add_seq (&body, tred_dlist);
10651 maybe_add_implicit_barrier_cancel (ctx, g, &body);
10653 if (rclauses)
10654 OMP_CLAUSE_DECL (rclauses) = rtmp;
10656 /* Add OpenACC joining and reduction markers just after the loop. */
10657 if (oacc_tail)
10658 gimple_seq_add_seq (&body, oacc_tail);
10660 pop_gimplify_context (new_stmt);
10662 gimple_bind_append_vars (new_stmt, ctx->block_vars);
10663 maybe_remove_omp_member_access_dummy_vars (new_stmt);
10664 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
10665 if (BLOCK_VARS (block))
10666 TREE_USED (block) = 1;
10668 gimple_bind_set_body (new_stmt, body);
10669 gimple_omp_set_body (stmt, NULL);
10670 gimple_omp_for_set_pre_body (stmt, NULL);
10673 /* Callback for walk_stmts. Check if the current statement only contains
10674 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
10676 static tree
10677 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10678 bool *handled_ops_p,
10679 struct walk_stmt_info *wi)
10681 int *info = (int *) wi->info;
10682 gimple *stmt = gsi_stmt (*gsi_p);
10684 *handled_ops_p = true;
10685 switch (gimple_code (stmt))
10687 WALK_SUBSTMTS;
10689 case GIMPLE_DEBUG:
10690 break;
10691 case GIMPLE_OMP_FOR:
10692 case GIMPLE_OMP_SECTIONS:
10693 *info = *info == 0 ? 1 : -1;
10694 break;
10695 default:
10696 *info = -1;
10697 break;
10699 return NULL;
10702 struct omp_taskcopy_context
10704 /* This field must be at the beginning, as we do "inheritance": Some
10705 callback functions for tree-inline.c (e.g., omp_copy_decl)
10706 receive a copy_body_data pointer that is up-casted to an
10707 omp_context pointer. */
10708 copy_body_data cb;
10709 omp_context *ctx;
10712 static tree
10713 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10715 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10717 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10718 return create_tmp_var (TREE_TYPE (var));
10720 return var;
10723 static tree
10724 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
10726 tree name, new_fields = NULL, type, f;
10728 type = lang_hooks.types.make_type (RECORD_TYPE);
10729 name = DECL_NAME (TYPE_NAME (orig_type));
10730 name = build_decl (gimple_location (tcctx->ctx->stmt),
10731 TYPE_DECL, name, type);
10732 TYPE_NAME (type) = name;
10734 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
10736 tree new_f = copy_node (f);
10737 DECL_CONTEXT (new_f) = type;
10738 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10739 TREE_CHAIN (new_f) = new_fields;
10740 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10741 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10742 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10743 &tcctx->cb, NULL);
10744 new_fields = new_f;
10745 tcctx->cb.decl_map->put (f, new_f);
10747 TYPE_FIELDS (type) = nreverse (new_fields);
10748 layout_type (type);
10749 return type;
10752 /* Create task copyfn. */
10754 static void
10755 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
10757 struct function *child_cfun;
10758 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
10759 tree record_type, srecord_type, bind, list;
10760 bool record_needs_remap = false, srecord_needs_remap = false;
10761 splay_tree_node n;
10762 struct omp_taskcopy_context tcctx;
10763 location_t loc = gimple_location (task_stmt);
10764 size_t looptempno = 0;
10766 child_fn = gimple_omp_task_copy_fn (task_stmt);
10767 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
10768 gcc_assert (child_cfun->cfg == NULL);
10769 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
10771 /* Reset DECL_CONTEXT on function arguments. */
10772 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
10773 DECL_CONTEXT (t) = child_fn;
10775 /* Populate the function. */
10776 push_gimplify_context ();
10777 push_cfun (child_cfun);
10779 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
10780 TREE_SIDE_EFFECTS (bind) = 1;
10781 list = NULL;
10782 DECL_SAVED_TREE (child_fn) = bind;
10783 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
10785 /* Remap src and dst argument types if needed. */
10786 record_type = ctx->record_type;
10787 srecord_type = ctx->srecord_type;
10788 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
10789 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10791 record_needs_remap = true;
10792 break;
10794 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
10795 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10797 srecord_needs_remap = true;
10798 break;
10801 if (record_needs_remap || srecord_needs_remap)
10803 memset (&tcctx, '\0', sizeof (tcctx));
10804 tcctx.cb.src_fn = ctx->cb.src_fn;
10805 tcctx.cb.dst_fn = child_fn;
10806 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
10807 gcc_checking_assert (tcctx.cb.src_node);
10808 tcctx.cb.dst_node = tcctx.cb.src_node;
10809 tcctx.cb.src_cfun = ctx->cb.src_cfun;
10810 tcctx.cb.copy_decl = task_copyfn_copy_decl;
10811 tcctx.cb.eh_lp_nr = 0;
10812 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
10813 tcctx.cb.decl_map = new hash_map<tree, tree>;
10814 tcctx.ctx = ctx;
10816 if (record_needs_remap)
10817 record_type = task_copyfn_remap_type (&tcctx, record_type);
10818 if (srecord_needs_remap)
10819 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
10821 else
10822 tcctx.cb.decl_map = NULL;
10824 arg = DECL_ARGUMENTS (child_fn);
10825 TREE_TYPE (arg) = build_pointer_type (record_type);
10826 sarg = DECL_CHAIN (arg);
10827 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
10829 /* First pass: initialize temporaries used in record_type and srecord_type
10830 sizes and field offsets. */
10831 if (tcctx.cb.decl_map)
10832 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10833 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10835 tree *p;
10837 decl = OMP_CLAUSE_DECL (c);
10838 p = tcctx.cb.decl_map->get (decl);
10839 if (p == NULL)
10840 continue;
10841 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10842 sf = (tree) n->value;
10843 sf = *tcctx.cb.decl_map->get (sf);
10844 src = build_simple_mem_ref_loc (loc, sarg);
10845 src = omp_build_component_ref (src, sf);
10846 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
10847 append_to_statement_list (t, &list);
10850 /* Second pass: copy shared var pointers and copy construct non-VLA
10851 firstprivate vars. */
10852 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10853 switch (OMP_CLAUSE_CODE (c))
10855 splay_tree_key key;
10856 case OMP_CLAUSE_SHARED:
10857 decl = OMP_CLAUSE_DECL (c);
10858 key = (splay_tree_key) decl;
10859 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
10860 key = (splay_tree_key) &DECL_UID (decl);
10861 n = splay_tree_lookup (ctx->field_map, key);
10862 if (n == NULL)
10863 break;
10864 f = (tree) n->value;
10865 if (tcctx.cb.decl_map)
10866 f = *tcctx.cb.decl_map->get (f);
10867 n = splay_tree_lookup (ctx->sfield_map, key);
10868 sf = (tree) n->value;
10869 if (tcctx.cb.decl_map)
10870 sf = *tcctx.cb.decl_map->get (sf);
10871 src = build_simple_mem_ref_loc (loc, sarg);
10872 src = omp_build_component_ref (src, sf);
10873 dst = build_simple_mem_ref_loc (loc, arg);
10874 dst = omp_build_component_ref (dst, f);
10875 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10876 append_to_statement_list (t, &list);
10877 break;
10878 case OMP_CLAUSE_REDUCTION:
10879 case OMP_CLAUSE_IN_REDUCTION:
10880 decl = OMP_CLAUSE_DECL (c);
10881 if (TREE_CODE (decl) == MEM_REF)
10883 decl = TREE_OPERAND (decl, 0);
10884 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10885 decl = TREE_OPERAND (decl, 0);
10886 if (TREE_CODE (decl) == INDIRECT_REF
10887 || TREE_CODE (decl) == ADDR_EXPR)
10888 decl = TREE_OPERAND (decl, 0);
10890 key = (splay_tree_key) decl;
10891 n = splay_tree_lookup (ctx->field_map, key);
10892 if (n == NULL)
10893 break;
10894 f = (tree) n->value;
10895 if (tcctx.cb.decl_map)
10896 f = *tcctx.cb.decl_map->get (f);
10897 n = splay_tree_lookup (ctx->sfield_map, key);
10898 sf = (tree) n->value;
10899 if (tcctx.cb.decl_map)
10900 sf = *tcctx.cb.decl_map->get (sf);
10901 src = build_simple_mem_ref_loc (loc, sarg);
10902 src = omp_build_component_ref (src, sf);
10903 if (decl != OMP_CLAUSE_DECL (c)
10904 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10905 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10906 src = build_simple_mem_ref_loc (loc, src);
10907 dst = build_simple_mem_ref_loc (loc, arg);
10908 dst = omp_build_component_ref (dst, f);
10909 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10910 append_to_statement_list (t, &list);
10911 break;
10912 case OMP_CLAUSE__LOOPTEMP_:
10913 /* Fields for first two _looptemp_ clauses are initialized by
10914 GOMP_taskloop*, the rest are handled like firstprivate. */
10915 if (looptempno < 2)
10917 looptempno++;
10918 break;
10920 /* FALLTHRU */
10921 case OMP_CLAUSE__REDUCTEMP_:
10922 case OMP_CLAUSE_FIRSTPRIVATE:
10923 decl = OMP_CLAUSE_DECL (c);
10924 if (is_variable_sized (decl))
10925 break;
10926 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10927 if (n == NULL)
10928 break;
10929 f = (tree) n->value;
10930 if (tcctx.cb.decl_map)
10931 f = *tcctx.cb.decl_map->get (f);
10932 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10933 if (n != NULL)
10935 sf = (tree) n->value;
10936 if (tcctx.cb.decl_map)
10937 sf = *tcctx.cb.decl_map->get (sf);
10938 src = build_simple_mem_ref_loc (loc, sarg);
10939 src = omp_build_component_ref (src, sf);
10940 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
10941 src = build_simple_mem_ref_loc (loc, src);
10943 else
10944 src = decl;
10945 dst = build_simple_mem_ref_loc (loc, arg);
10946 dst = omp_build_component_ref (dst, f);
10947 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
10948 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10949 else
10950 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10951 append_to_statement_list (t, &list);
10952 break;
10953 case OMP_CLAUSE_PRIVATE:
10954 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
10955 break;
10956 decl = OMP_CLAUSE_DECL (c);
10957 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10958 f = (tree) n->value;
10959 if (tcctx.cb.decl_map)
10960 f = *tcctx.cb.decl_map->get (f);
10961 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10962 if (n != NULL)
10964 sf = (tree) n->value;
10965 if (tcctx.cb.decl_map)
10966 sf = *tcctx.cb.decl_map->get (sf);
10967 src = build_simple_mem_ref_loc (loc, sarg);
10968 src = omp_build_component_ref (src, sf);
10969 if (use_pointer_for_field (decl, NULL))
10970 src = build_simple_mem_ref_loc (loc, src);
10972 else
10973 src = decl;
10974 dst = build_simple_mem_ref_loc (loc, arg);
10975 dst = omp_build_component_ref (dst, f);
10976 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10977 append_to_statement_list (t, &list);
10978 break;
10979 default:
10980 break;
10983 /* Last pass: handle VLA firstprivates. */
10984 if (tcctx.cb.decl_map)
10985 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10986 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10988 tree ind, ptr, df;
10990 decl = OMP_CLAUSE_DECL (c);
10991 if (!is_variable_sized (decl))
10992 continue;
10993 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10994 if (n == NULL)
10995 continue;
10996 f = (tree) n->value;
10997 f = *tcctx.cb.decl_map->get (f);
10998 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
10999 ind = DECL_VALUE_EXPR (decl);
11000 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
11001 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
11002 n = splay_tree_lookup (ctx->sfield_map,
11003 (splay_tree_key) TREE_OPERAND (ind, 0));
11004 sf = (tree) n->value;
11005 sf = *tcctx.cb.decl_map->get (sf);
11006 src = build_simple_mem_ref_loc (loc, sarg);
11007 src = omp_build_component_ref (src, sf);
11008 src = build_simple_mem_ref_loc (loc, src);
11009 dst = build_simple_mem_ref_loc (loc, arg);
11010 dst = omp_build_component_ref (dst, f);
11011 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
11012 append_to_statement_list (t, &list);
11013 n = splay_tree_lookup (ctx->field_map,
11014 (splay_tree_key) TREE_OPERAND (ind, 0));
11015 df = (tree) n->value;
11016 df = *tcctx.cb.decl_map->get (df);
11017 ptr = build_simple_mem_ref_loc (loc, arg);
11018 ptr = omp_build_component_ref (ptr, df);
11019 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
11020 build_fold_addr_expr_loc (loc, dst));
11021 append_to_statement_list (t, &list);
11024 t = build1 (RETURN_EXPR, void_type_node, NULL);
11025 append_to_statement_list (t, &list);
11027 if (tcctx.cb.decl_map)
11028 delete tcctx.cb.decl_map;
11029 pop_gimplify_context (NULL);
11030 BIND_EXPR_BODY (bind) = list;
11031 pop_cfun ();
11034 static void
11035 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
11037 tree c, clauses;
11038 gimple *g;
11039 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
11041 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
11042 gcc_assert (clauses);
11043 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11044 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
11045 switch (OMP_CLAUSE_DEPEND_KIND (c))
11047 case OMP_CLAUSE_DEPEND_LAST:
11048 /* Lowering already done at gimplification. */
11049 return;
11050 case OMP_CLAUSE_DEPEND_IN:
11051 cnt[2]++;
11052 break;
11053 case OMP_CLAUSE_DEPEND_OUT:
11054 case OMP_CLAUSE_DEPEND_INOUT:
11055 cnt[0]++;
11056 break;
11057 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11058 cnt[1]++;
11059 break;
11060 case OMP_CLAUSE_DEPEND_DEPOBJ:
11061 cnt[3]++;
11062 break;
11063 case OMP_CLAUSE_DEPEND_SOURCE:
11064 case OMP_CLAUSE_DEPEND_SINK:
11065 /* FALLTHRU */
11066 default:
11067 gcc_unreachable ();
11069 if (cnt[1] || cnt[3])
11070 idx = 5;
11071 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
11072 tree type = build_array_type_nelts (ptr_type_node, total + idx);
11073 tree array = create_tmp_var (type);
11074 TREE_ADDRESSABLE (array) = 1;
11075 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
11076 NULL_TREE);
11077 if (idx == 5)
11079 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
11080 gimple_seq_add_stmt (iseq, g);
11081 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
11082 NULL_TREE);
11084 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
11085 gimple_seq_add_stmt (iseq, g);
11086 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
11088 r = build4 (ARRAY_REF, ptr_type_node, array,
11089 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
11090 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
11091 gimple_seq_add_stmt (iseq, g);
11093 for (i = 0; i < 4; i++)
11095 if (cnt[i] == 0)
11096 continue;
11097 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11098 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
11099 continue;
11100 else
11102 switch (OMP_CLAUSE_DEPEND_KIND (c))
11104 case OMP_CLAUSE_DEPEND_IN:
11105 if (i != 2)
11106 continue;
11107 break;
11108 case OMP_CLAUSE_DEPEND_OUT:
11109 case OMP_CLAUSE_DEPEND_INOUT:
11110 if (i != 0)
11111 continue;
11112 break;
11113 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11114 if (i != 1)
11115 continue;
11116 break;
11117 case OMP_CLAUSE_DEPEND_DEPOBJ:
11118 if (i != 3)
11119 continue;
11120 break;
11121 default:
11122 gcc_unreachable ();
11124 tree t = OMP_CLAUSE_DECL (c);
11125 t = fold_convert (ptr_type_node, t);
11126 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
11127 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
11128 NULL_TREE, NULL_TREE);
11129 g = gimple_build_assign (r, t);
11130 gimple_seq_add_stmt (iseq, g);
11133 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
11134 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
11135 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
11136 OMP_CLAUSE_CHAIN (c) = *pclauses;
11137 *pclauses = c;
11138 tree clobber = build_clobber (type);
11139 g = gimple_build_assign (array, clobber);
11140 gimple_seq_add_stmt (oseq, g);
11143 /* Lower the OpenMP parallel or task directive in the current statement
11144 in GSI_P. CTX holds context information for the directive. */
11146 static void
11147 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11149 tree clauses;
11150 tree child_fn, t;
11151 gimple *stmt = gsi_stmt (*gsi_p);
11152 gbind *par_bind, *bind, *dep_bind = NULL;
11153 gimple_seq par_body;
11154 location_t loc = gimple_location (stmt);
11156 clauses = gimple_omp_taskreg_clauses (stmt);
11157 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11158 && gimple_omp_task_taskwait_p (stmt))
11160 par_bind = NULL;
11161 par_body = NULL;
11163 else
11165 par_bind
11166 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
11167 par_body = gimple_bind_body (par_bind);
11169 child_fn = ctx->cb.dst_fn;
11170 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11171 && !gimple_omp_parallel_combined_p (stmt))
11173 struct walk_stmt_info wi;
11174 int ws_num = 0;
11176 memset (&wi, 0, sizeof (wi));
11177 wi.info = &ws_num;
11178 wi.val_only = true;
11179 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
11180 if (ws_num == 1)
11181 gimple_omp_parallel_set_combined_p (stmt, true);
11183 gimple_seq dep_ilist = NULL;
11184 gimple_seq dep_olist = NULL;
11185 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11186 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11188 push_gimplify_context ();
11189 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11190 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
11191 &dep_ilist, &dep_olist);
11194 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11195 && gimple_omp_task_taskwait_p (stmt))
11197 if (dep_bind)
11199 gsi_replace (gsi_p, dep_bind, true);
11200 gimple_bind_add_seq (dep_bind, dep_ilist);
11201 gimple_bind_add_stmt (dep_bind, stmt);
11202 gimple_bind_add_seq (dep_bind, dep_olist);
11203 pop_gimplify_context (dep_bind);
11205 return;
11208 if (ctx->srecord_type)
11209 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
11211 gimple_seq tskred_ilist = NULL;
11212 gimple_seq tskred_olist = NULL;
11213 if ((is_task_ctx (ctx)
11214 && gimple_omp_task_taskloop_p (ctx->stmt)
11215 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
11216 OMP_CLAUSE_REDUCTION))
11217 || (is_parallel_ctx (ctx)
11218 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
11219 OMP_CLAUSE__REDUCTEMP_)))
11221 if (dep_bind == NULL)
11223 push_gimplify_context ();
11224 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11226 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
11227 : OMP_PARALLEL,
11228 gimple_omp_taskreg_clauses (ctx->stmt),
11229 &tskred_ilist, &tskred_olist);
11232 push_gimplify_context ();
11234 gimple_seq par_olist = NULL;
11235 gimple_seq par_ilist = NULL;
11236 gimple_seq par_rlist = NULL;
11237 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
11238 lower_omp (&par_body, ctx);
11239 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
11240 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
11242 /* Declare all the variables created by mapping and the variables
11243 declared in the scope of the parallel body. */
11244 record_vars_into (ctx->block_vars, child_fn);
11245 maybe_remove_omp_member_access_dummy_vars (par_bind);
11246 record_vars_into (gimple_bind_vars (par_bind), child_fn);
11248 if (ctx->record_type)
11250 ctx->sender_decl
11251 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
11252 : ctx->record_type, ".omp_data_o");
11253 DECL_NAMELESS (ctx->sender_decl) = 1;
11254 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11255 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
11258 gimple_seq olist = NULL;
11259 gimple_seq ilist = NULL;
11260 lower_send_clauses (clauses, &ilist, &olist, ctx);
11261 lower_send_shared_vars (&ilist, &olist, ctx);
11263 if (ctx->record_type)
11265 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
11266 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11267 clobber));
11270 /* Once all the expansions are done, sequence all the different
11271 fragments inside gimple_omp_body. */
11273 gimple_seq new_body = NULL;
11275 if (ctx->record_type)
11277 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11278 /* fixup_child_record_type might have changed receiver_decl's type. */
11279 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11280 gimple_seq_add_stmt (&new_body,
11281 gimple_build_assign (ctx->receiver_decl, t));
11284 gimple_seq_add_seq (&new_body, par_ilist);
11285 gimple_seq_add_seq (&new_body, par_body);
11286 gimple_seq_add_seq (&new_body, par_rlist);
11287 if (ctx->cancellable)
11288 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
11289 gimple_seq_add_seq (&new_body, par_olist);
11290 new_body = maybe_catch_exception (new_body);
11291 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
11292 gimple_seq_add_stmt (&new_body,
11293 gimple_build_omp_continue (integer_zero_node,
11294 integer_zero_node));
11295 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11296 gimple_omp_set_body (stmt, new_body);
11298 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
11299 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11300 else
11301 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
11302 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11303 gimple_bind_add_seq (bind, ilist);
11304 gimple_bind_add_stmt (bind, stmt);
11305 gimple_bind_add_seq (bind, olist);
11307 pop_gimplify_context (NULL);
11309 if (dep_bind)
11311 gimple_bind_add_seq (dep_bind, dep_ilist);
11312 gimple_bind_add_seq (dep_bind, tskred_ilist);
11313 gimple_bind_add_stmt (dep_bind, bind);
11314 gimple_bind_add_seq (dep_bind, tskred_olist);
11315 gimple_bind_add_seq (dep_bind, dep_olist);
11316 pop_gimplify_context (dep_bind);
11320 /* Lower the GIMPLE_OMP_TARGET in the current statement
11321 in GSI_P. CTX holds context information for the directive. */
11323 static void
11324 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11326 tree clauses;
11327 tree child_fn, t, c;
11328 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11329 gbind *tgt_bind, *bind, *dep_bind = NULL;
11330 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11331 location_t loc = gimple_location (stmt);
11332 bool offloaded, data_region;
11333 unsigned int map_cnt = 0;
11335 offloaded = is_gimple_omp_offloaded (stmt);
11336 switch (gimple_omp_target_kind (stmt))
11338 case GF_OMP_TARGET_KIND_REGION:
11339 case GF_OMP_TARGET_KIND_UPDATE:
11340 case GF_OMP_TARGET_KIND_ENTER_DATA:
11341 case GF_OMP_TARGET_KIND_EXIT_DATA:
11342 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11343 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11344 case GF_OMP_TARGET_KIND_OACC_SERIAL:
11345 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11346 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11347 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11348 data_region = false;
11349 break;
11350 case GF_OMP_TARGET_KIND_DATA:
11351 case GF_OMP_TARGET_KIND_OACC_DATA:
11352 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11353 data_region = true;
11354 break;
11355 default:
11356 gcc_unreachable ();
11359 clauses = gimple_omp_target_clauses (stmt);
11361 gimple_seq dep_ilist = NULL;
11362 gimple_seq dep_olist = NULL;
11363 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11365 push_gimplify_context ();
11366 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11367 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11368 &dep_ilist, &dep_olist);
11371 tgt_bind = NULL;
11372 tgt_body = NULL;
11373 if (offloaded)
11375 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11376 tgt_body = gimple_bind_body (tgt_bind);
11378 else if (data_region)
11379 tgt_body = gimple_omp_body (stmt);
11380 child_fn = ctx->cb.dst_fn;
11382 push_gimplify_context ();
11383 fplist = NULL;
11385 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11386 switch (OMP_CLAUSE_CODE (c))
11388 tree var, x;
11390 default:
11391 break;
11392 case OMP_CLAUSE_MAP:
11393 #if CHECKING_P
11394 /* First check what we're prepared to handle in the following. */
11395 switch (OMP_CLAUSE_MAP_KIND (c))
11397 case GOMP_MAP_ALLOC:
11398 case GOMP_MAP_TO:
11399 case GOMP_MAP_FROM:
11400 case GOMP_MAP_TOFROM:
11401 case GOMP_MAP_POINTER:
11402 case GOMP_MAP_TO_PSET:
11403 case GOMP_MAP_DELETE:
11404 case GOMP_MAP_RELEASE:
11405 case GOMP_MAP_ALWAYS_TO:
11406 case GOMP_MAP_ALWAYS_FROM:
11407 case GOMP_MAP_ALWAYS_TOFROM:
11408 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11409 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11410 case GOMP_MAP_STRUCT:
11411 case GOMP_MAP_ALWAYS_POINTER:
11412 break;
11413 case GOMP_MAP_IF_PRESENT:
11414 case GOMP_MAP_FORCE_ALLOC:
11415 case GOMP_MAP_FORCE_TO:
11416 case GOMP_MAP_FORCE_FROM:
11417 case GOMP_MAP_FORCE_TOFROM:
11418 case GOMP_MAP_FORCE_PRESENT:
11419 case GOMP_MAP_FORCE_DEVICEPTR:
11420 case GOMP_MAP_DEVICE_RESIDENT:
11421 case GOMP_MAP_LINK:
11422 case GOMP_MAP_ATTACH:
11423 case GOMP_MAP_DETACH:
11424 case GOMP_MAP_FORCE_DETACH:
11425 gcc_assert (is_gimple_omp_oacc (stmt));
11426 break;
11427 default:
11428 gcc_unreachable ();
11430 #endif
11431 /* FALLTHRU */
11432 case OMP_CLAUSE_TO:
11433 case OMP_CLAUSE_FROM:
11434 oacc_firstprivate:
11435 var = OMP_CLAUSE_DECL (c);
11436 if (!DECL_P (var))
11438 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11439 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11440 && (OMP_CLAUSE_MAP_KIND (c)
11441 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
11442 map_cnt++;
11443 continue;
11446 if (DECL_SIZE (var)
11447 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11449 tree var2 = DECL_VALUE_EXPR (var);
11450 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11451 var2 = TREE_OPERAND (var2, 0);
11452 gcc_assert (DECL_P (var2));
11453 var = var2;
11456 if (offloaded
11457 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11458 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11459 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11461 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11463 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11464 && varpool_node::get_create (var)->offloadable)
11465 continue;
11467 tree type = build_pointer_type (TREE_TYPE (var));
11468 tree new_var = lookup_decl (var, ctx);
11469 x = create_tmp_var_raw (type, get_name (new_var));
11470 gimple_add_tmp_var (x);
11471 x = build_simple_mem_ref (x);
11472 SET_DECL_VALUE_EXPR (new_var, x);
11473 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11475 continue;
11478 if (!maybe_lookup_field (var, ctx))
11479 continue;
11481 /* Don't remap compute constructs' reduction variables, because the
11482 intermediate result must be local to each gang. */
11483 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11484 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11486 x = build_receiver_ref (var, true, ctx);
11487 tree new_var = lookup_decl (var, ctx);
11489 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11490 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11491 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11492 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11493 x = build_simple_mem_ref (x);
11494 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11496 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11497 if (omp_is_reference (new_var)
11498 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
11499 || DECL_BY_REFERENCE (var)))
11501 /* Create a local object to hold the instance
11502 value. */
11503 tree type = TREE_TYPE (TREE_TYPE (new_var));
11504 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11505 tree inst = create_tmp_var (type, id);
11506 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11507 x = build_fold_addr_expr (inst);
11509 gimplify_assign (new_var, x, &fplist);
11511 else if (DECL_P (new_var))
11513 SET_DECL_VALUE_EXPR (new_var, x);
11514 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11516 else
11517 gcc_unreachable ();
11519 map_cnt++;
11520 break;
11522 case OMP_CLAUSE_FIRSTPRIVATE:
11523 if (is_oacc_parallel_or_serial (ctx))
11524 goto oacc_firstprivate;
11525 map_cnt++;
11526 var = OMP_CLAUSE_DECL (c);
11527 if (!omp_is_reference (var)
11528 && !is_gimple_reg_type (TREE_TYPE (var)))
11530 tree new_var = lookup_decl (var, ctx);
11531 if (is_variable_sized (var))
11533 tree pvar = DECL_VALUE_EXPR (var);
11534 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11535 pvar = TREE_OPERAND (pvar, 0);
11536 gcc_assert (DECL_P (pvar));
11537 tree new_pvar = lookup_decl (pvar, ctx);
11538 x = build_fold_indirect_ref (new_pvar);
11539 TREE_THIS_NOTRAP (x) = 1;
11541 else
11542 x = build_receiver_ref (var, true, ctx);
11543 SET_DECL_VALUE_EXPR (new_var, x);
11544 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11546 break;
11548 case OMP_CLAUSE_PRIVATE:
11549 if (is_gimple_omp_oacc (ctx->stmt))
11550 break;
11551 var = OMP_CLAUSE_DECL (c);
11552 if (is_variable_sized (var))
11554 tree new_var = lookup_decl (var, ctx);
11555 tree pvar = DECL_VALUE_EXPR (var);
11556 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11557 pvar = TREE_OPERAND (pvar, 0);
11558 gcc_assert (DECL_P (pvar));
11559 tree new_pvar = lookup_decl (pvar, ctx);
11560 x = build_fold_indirect_ref (new_pvar);
11561 TREE_THIS_NOTRAP (x) = 1;
11562 SET_DECL_VALUE_EXPR (new_var, x);
11563 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11565 break;
11567 case OMP_CLAUSE_USE_DEVICE_PTR:
11568 case OMP_CLAUSE_USE_DEVICE_ADDR:
11569 case OMP_CLAUSE_IS_DEVICE_PTR:
11570 var = OMP_CLAUSE_DECL (c);
11571 map_cnt++;
11572 if (is_variable_sized (var))
11574 tree new_var = lookup_decl (var, ctx);
11575 tree pvar = DECL_VALUE_EXPR (var);
11576 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11577 pvar = TREE_OPERAND (pvar, 0);
11578 gcc_assert (DECL_P (pvar));
11579 tree new_pvar = lookup_decl (pvar, ctx);
11580 x = build_fold_indirect_ref (new_pvar);
11581 TREE_THIS_NOTRAP (x) = 1;
11582 SET_DECL_VALUE_EXPR (new_var, x);
11583 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11585 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11586 && !omp_is_reference (var)
11587 && !omp_is_allocatable_or_ptr (var)
11588 && !lang_hooks.decls.omp_array_data (var, true))
11589 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11591 tree new_var = lookup_decl (var, ctx);
11592 tree type = build_pointer_type (TREE_TYPE (var));
11593 x = create_tmp_var_raw (type, get_name (new_var));
11594 gimple_add_tmp_var (x);
11595 x = build_simple_mem_ref (x);
11596 SET_DECL_VALUE_EXPR (new_var, x);
11597 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11599 else
11601 tree new_var = lookup_decl (var, ctx);
11602 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11603 gimple_add_tmp_var (x);
11604 SET_DECL_VALUE_EXPR (new_var, x);
11605 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11607 break;
11610 if (offloaded)
11612 target_nesting_level++;
11613 lower_omp (&tgt_body, ctx);
11614 target_nesting_level--;
11616 else if (data_region)
11617 lower_omp (&tgt_body, ctx);
11619 if (offloaded)
11621 /* Declare all the variables created by mapping and the variables
11622 declared in the scope of the target body. */
11623 record_vars_into (ctx->block_vars, child_fn);
11624 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11625 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11628 olist = NULL;
11629 ilist = NULL;
11630 if (ctx->record_type)
11632 ctx->sender_decl
11633 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11634 DECL_NAMELESS (ctx->sender_decl) = 1;
11635 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11636 t = make_tree_vec (3);
11637 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11638 TREE_VEC_ELT (t, 1)
11639 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11640 ".omp_data_sizes");
11641 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11642 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11643 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11644 tree tkind_type = short_unsigned_type_node;
11645 int talign_shift = 8;
11646 TREE_VEC_ELT (t, 2)
11647 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11648 ".omp_data_kinds");
11649 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11650 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11651 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11652 gimple_omp_target_set_data_arg (stmt, t);
11654 vec<constructor_elt, va_gc> *vsize;
11655 vec<constructor_elt, va_gc> *vkind;
11656 vec_alloc (vsize, map_cnt);
11657 vec_alloc (vkind, map_cnt);
11658 unsigned int map_idx = 0;
11660 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11661 switch (OMP_CLAUSE_CODE (c))
11663 tree ovar, nc, s, purpose, var, x, type;
11664 unsigned int talign;
11666 default:
11667 break;
11669 case OMP_CLAUSE_MAP:
11670 case OMP_CLAUSE_TO:
11671 case OMP_CLAUSE_FROM:
11672 oacc_firstprivate_map:
11673 nc = c;
11674 ovar = OMP_CLAUSE_DECL (c);
11675 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11676 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11677 || (OMP_CLAUSE_MAP_KIND (c)
11678 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11679 break;
11680 if (!DECL_P (ovar))
11682 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11683 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11685 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11686 == get_base_address (ovar));
11687 nc = OMP_CLAUSE_CHAIN (c);
11688 ovar = OMP_CLAUSE_DECL (nc);
11690 else
11692 tree x = build_sender_ref (ovar, ctx);
11693 tree v
11694 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11695 gimplify_assign (x, v, &ilist);
11696 nc = NULL_TREE;
11699 else
11701 if (DECL_SIZE (ovar)
11702 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11704 tree ovar2 = DECL_VALUE_EXPR (ovar);
11705 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11706 ovar2 = TREE_OPERAND (ovar2, 0);
11707 gcc_assert (DECL_P (ovar2));
11708 ovar = ovar2;
11710 if (!maybe_lookup_field (ovar, ctx))
11711 continue;
11714 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11715 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11716 talign = DECL_ALIGN_UNIT (ovar);
11717 if (nc)
11719 var = lookup_decl_in_outer_ctx (ovar, ctx);
11720 x = build_sender_ref (ovar, ctx);
11722 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11723 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11724 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11725 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11727 gcc_assert (offloaded);
11728 tree avar
11729 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11730 mark_addressable (avar);
11731 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11732 talign = DECL_ALIGN_UNIT (avar);
11733 avar = build_fold_addr_expr (avar);
11734 gimplify_assign (x, avar, &ilist);
11736 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11738 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11739 if (!omp_is_reference (var))
11741 if (is_gimple_reg (var)
11742 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11743 TREE_NO_WARNING (var) = 1;
11744 var = build_fold_addr_expr (var);
11746 else
11747 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11748 gimplify_assign (x, var, &ilist);
11750 else if (is_gimple_reg (var))
11752 gcc_assert (offloaded);
11753 tree avar = create_tmp_var (TREE_TYPE (var));
11754 mark_addressable (avar);
11755 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11756 if (GOMP_MAP_COPY_TO_P (map_kind)
11757 || map_kind == GOMP_MAP_POINTER
11758 || map_kind == GOMP_MAP_TO_PSET
11759 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11761 /* If we need to initialize a temporary
11762 with VAR because it is not addressable, and
11763 the variable hasn't been initialized yet, then
11764 we'll get a warning for the store to avar.
11765 Don't warn in that case, the mapping might
11766 be implicit. */
11767 TREE_NO_WARNING (var) = 1;
11768 gimplify_assign (avar, var, &ilist);
11770 avar = build_fold_addr_expr (avar);
11771 gimplify_assign (x, avar, &ilist);
11772 if ((GOMP_MAP_COPY_FROM_P (map_kind)
11773 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11774 && !TYPE_READONLY (TREE_TYPE (var)))
11776 x = unshare_expr (x);
11777 x = build_simple_mem_ref (x);
11778 gimplify_assign (var, x, &olist);
11781 else
11783 /* While MAP is handled explicitly by the FE,
11784 for 'target update', only the identified is passed. */
11785 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
11786 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
11787 && (omp_is_allocatable_or_ptr (var)
11788 && omp_check_optional_argument (var, false)))
11789 var = build_fold_indirect_ref (var);
11790 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
11791 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
11792 || (!omp_is_allocatable_or_ptr (var)
11793 && !omp_check_optional_argument (var, false)))
11794 var = build_fold_addr_expr (var);
11795 gimplify_assign (x, var, &ilist);
11798 s = NULL_TREE;
11799 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11801 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11802 s = TREE_TYPE (ovar);
11803 if (TREE_CODE (s) == REFERENCE_TYPE
11804 || omp_check_optional_argument (ovar, false))
11805 s = TREE_TYPE (s);
11806 s = TYPE_SIZE_UNIT (s);
11808 else
11809 s = OMP_CLAUSE_SIZE (c);
11810 if (s == NULL_TREE)
11811 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11812 s = fold_convert (size_type_node, s);
11813 purpose = size_int (map_idx++);
11814 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11815 if (TREE_CODE (s) != INTEGER_CST)
11816 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11818 unsigned HOST_WIDE_INT tkind, tkind_zero;
11819 switch (OMP_CLAUSE_CODE (c))
11821 case OMP_CLAUSE_MAP:
11822 tkind = OMP_CLAUSE_MAP_KIND (c);
11823 tkind_zero = tkind;
11824 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11825 switch (tkind)
11827 case GOMP_MAP_ALLOC:
11828 case GOMP_MAP_IF_PRESENT:
11829 case GOMP_MAP_TO:
11830 case GOMP_MAP_FROM:
11831 case GOMP_MAP_TOFROM:
11832 case GOMP_MAP_ALWAYS_TO:
11833 case GOMP_MAP_ALWAYS_FROM:
11834 case GOMP_MAP_ALWAYS_TOFROM:
11835 case GOMP_MAP_RELEASE:
11836 case GOMP_MAP_FORCE_TO:
11837 case GOMP_MAP_FORCE_FROM:
11838 case GOMP_MAP_FORCE_TOFROM:
11839 case GOMP_MAP_FORCE_PRESENT:
11840 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
11841 break;
11842 case GOMP_MAP_DELETE:
11843 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
11844 default:
11845 break;
11847 if (tkind_zero != tkind)
11849 if (integer_zerop (s))
11850 tkind = tkind_zero;
11851 else if (integer_nonzerop (s))
11852 tkind_zero = tkind;
11854 break;
11855 case OMP_CLAUSE_FIRSTPRIVATE:
11856 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11857 tkind = GOMP_MAP_TO;
11858 tkind_zero = tkind;
11859 break;
11860 case OMP_CLAUSE_TO:
11861 tkind = GOMP_MAP_TO;
11862 tkind_zero = tkind;
11863 break;
11864 case OMP_CLAUSE_FROM:
11865 tkind = GOMP_MAP_FROM;
11866 tkind_zero = tkind;
11867 break;
11868 default:
11869 gcc_unreachable ();
11871 gcc_checking_assert (tkind
11872 < (HOST_WIDE_INT_C (1U) << talign_shift));
11873 gcc_checking_assert (tkind_zero
11874 < (HOST_WIDE_INT_C (1U) << talign_shift));
11875 talign = ceil_log2 (talign);
11876 tkind |= talign << talign_shift;
11877 tkind_zero |= talign << talign_shift;
11878 gcc_checking_assert (tkind
11879 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11880 gcc_checking_assert (tkind_zero
11881 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11882 if (tkind == tkind_zero)
11883 x = build_int_cstu (tkind_type, tkind);
11884 else
11886 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
11887 x = build3 (COND_EXPR, tkind_type,
11888 fold_build2 (EQ_EXPR, boolean_type_node,
11889 unshare_expr (s), size_zero_node),
11890 build_int_cstu (tkind_type, tkind_zero),
11891 build_int_cstu (tkind_type, tkind));
11893 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
11894 if (nc && nc != c)
11895 c = nc;
11896 break;
11898 case OMP_CLAUSE_FIRSTPRIVATE:
11899 if (is_oacc_parallel_or_serial (ctx))
11900 goto oacc_firstprivate_map;
11901 ovar = OMP_CLAUSE_DECL (c);
11902 if (omp_is_reference (ovar))
11903 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11904 else
11905 talign = DECL_ALIGN_UNIT (ovar);
11906 var = lookup_decl_in_outer_ctx (ovar, ctx);
11907 x = build_sender_ref (ovar, ctx);
11908 tkind = GOMP_MAP_FIRSTPRIVATE;
11909 type = TREE_TYPE (ovar);
11910 if (omp_is_reference (ovar))
11911 type = TREE_TYPE (type);
11912 if ((INTEGRAL_TYPE_P (type)
11913 && TYPE_PRECISION (type) <= POINTER_SIZE)
11914 || TREE_CODE (type) == POINTER_TYPE)
11916 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11917 tree t = var;
11918 if (omp_is_reference (var))
11919 t = build_simple_mem_ref (var);
11920 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11921 TREE_NO_WARNING (var) = 1;
11922 if (TREE_CODE (type) != POINTER_TYPE)
11923 t = fold_convert (pointer_sized_int_node, t);
11924 t = fold_convert (TREE_TYPE (x), t);
11925 gimplify_assign (x, t, &ilist);
11927 else if (omp_is_reference (var))
11928 gimplify_assign (x, var, &ilist);
11929 else if (is_gimple_reg (var))
11931 tree avar = create_tmp_var (TREE_TYPE (var));
11932 mark_addressable (avar);
11933 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11934 TREE_NO_WARNING (var) = 1;
11935 gimplify_assign (avar, var, &ilist);
11936 avar = build_fold_addr_expr (avar);
11937 gimplify_assign (x, avar, &ilist);
11939 else
11941 var = build_fold_addr_expr (var);
11942 gimplify_assign (x, var, &ilist);
11944 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
11945 s = size_int (0);
11946 else if (omp_is_reference (ovar))
11947 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11948 else
11949 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11950 s = fold_convert (size_type_node, s);
11951 purpose = size_int (map_idx++);
11952 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11953 if (TREE_CODE (s) != INTEGER_CST)
11954 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11956 gcc_checking_assert (tkind
11957 < (HOST_WIDE_INT_C (1U) << talign_shift));
11958 talign = ceil_log2 (talign);
11959 tkind |= talign << talign_shift;
11960 gcc_checking_assert (tkind
11961 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11962 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11963 build_int_cstu (tkind_type, tkind));
11964 break;
11966 case OMP_CLAUSE_USE_DEVICE_PTR:
11967 case OMP_CLAUSE_USE_DEVICE_ADDR:
11968 case OMP_CLAUSE_IS_DEVICE_PTR:
11969 ovar = OMP_CLAUSE_DECL (c);
11970 var = lookup_decl_in_outer_ctx (ovar, ctx);
11972 if (lang_hooks.decls.omp_array_data (ovar, true))
11974 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
11975 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
11976 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
11978 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
11980 tkind = GOMP_MAP_USE_DEVICE_PTR;
11981 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
11983 else
11985 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11986 x = build_sender_ref (ovar, ctx);
11989 if (is_gimple_omp_oacc (ctx->stmt))
11991 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
11993 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
11994 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
11997 type = TREE_TYPE (ovar);
11998 if (lang_hooks.decls.omp_array_data (ovar, true))
11999 var = lang_hooks.decls.omp_array_data (ovar, false);
12000 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12001 && !omp_is_reference (ovar)
12002 && !omp_is_allocatable_or_ptr (ovar))
12003 || TREE_CODE (type) == ARRAY_TYPE)
12004 var = build_fold_addr_expr (var);
12005 else
12007 if (omp_is_reference (ovar)
12008 || omp_check_optional_argument (ovar, false)
12009 || omp_is_allocatable_or_ptr (ovar))
12011 type = TREE_TYPE (type);
12012 if (TREE_CODE (type) != ARRAY_TYPE
12013 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12014 && !omp_is_allocatable_or_ptr (ovar))
12015 || (omp_is_reference (ovar)
12016 && omp_is_allocatable_or_ptr (ovar))))
12017 var = build_simple_mem_ref (var);
12018 var = fold_convert (TREE_TYPE (x), var);
12021 tree present;
12022 present = omp_check_optional_argument (ovar, true);
12023 if (present)
12025 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12026 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12027 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12028 tree new_x = unshare_expr (x);
12029 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
12030 fb_rvalue);
12031 gcond *cond = gimple_build_cond_from_tree (present,
12032 notnull_label,
12033 null_label);
12034 gimple_seq_add_stmt (&ilist, cond);
12035 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
12036 gimplify_assign (new_x, null_pointer_node, &ilist);
12037 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
12038 gimple_seq_add_stmt (&ilist,
12039 gimple_build_label (notnull_label));
12040 gimplify_assign (x, var, &ilist);
12041 gimple_seq_add_stmt (&ilist,
12042 gimple_build_label (opt_arg_label));
12044 else
12045 gimplify_assign (x, var, &ilist);
12046 s = size_int (0);
12047 purpose = size_int (map_idx++);
12048 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12049 gcc_checking_assert (tkind
12050 < (HOST_WIDE_INT_C (1U) << talign_shift));
12051 gcc_checking_assert (tkind
12052 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12053 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12054 build_int_cstu (tkind_type, tkind));
12055 break;
12058 gcc_assert (map_idx == map_cnt);
12060 DECL_INITIAL (TREE_VEC_ELT (t, 1))
12061 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
12062 DECL_INITIAL (TREE_VEC_ELT (t, 2))
12063 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
12064 for (int i = 1; i <= 2; i++)
12065 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
12067 gimple_seq initlist = NULL;
12068 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
12069 TREE_VEC_ELT (t, i)),
12070 &initlist, true, NULL_TREE);
12071 gimple_seq_add_seq (&ilist, initlist);
12073 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
12074 gimple_seq_add_stmt (&olist,
12075 gimple_build_assign (TREE_VEC_ELT (t, i),
12076 clobber));
12079 tree clobber = build_clobber (ctx->record_type);
12080 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12081 clobber));
12084 /* Once all the expansions are done, sequence all the different
12085 fragments inside gimple_omp_body. */
12087 new_body = NULL;
12089 if (offloaded
12090 && ctx->record_type)
12092 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12093 /* fixup_child_record_type might have changed receiver_decl's type. */
12094 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12095 gimple_seq_add_stmt (&new_body,
12096 gimple_build_assign (ctx->receiver_decl, t));
12098 gimple_seq_add_seq (&new_body, fplist);
12100 if (offloaded || data_region)
12102 tree prev = NULL_TREE;
12103 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12104 switch (OMP_CLAUSE_CODE (c))
12106 tree var, x;
12107 default:
12108 break;
12109 case OMP_CLAUSE_FIRSTPRIVATE:
12110 if (is_gimple_omp_oacc (ctx->stmt))
12111 break;
12112 var = OMP_CLAUSE_DECL (c);
12113 if (omp_is_reference (var)
12114 || is_gimple_reg_type (TREE_TYPE (var)))
12116 tree new_var = lookup_decl (var, ctx);
12117 tree type;
12118 type = TREE_TYPE (var);
12119 if (omp_is_reference (var))
12120 type = TREE_TYPE (type);
12121 if ((INTEGRAL_TYPE_P (type)
12122 && TYPE_PRECISION (type) <= POINTER_SIZE)
12123 || TREE_CODE (type) == POINTER_TYPE)
12125 x = build_receiver_ref (var, false, ctx);
12126 if (TREE_CODE (type) != POINTER_TYPE)
12127 x = fold_convert (pointer_sized_int_node, x);
12128 x = fold_convert (type, x);
12129 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12130 fb_rvalue);
12131 if (omp_is_reference (var))
12133 tree v = create_tmp_var_raw (type, get_name (var));
12134 gimple_add_tmp_var (v);
12135 TREE_ADDRESSABLE (v) = 1;
12136 gimple_seq_add_stmt (&new_body,
12137 gimple_build_assign (v, x));
12138 x = build_fold_addr_expr (v);
12140 gimple_seq_add_stmt (&new_body,
12141 gimple_build_assign (new_var, x));
12143 else
12145 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
12146 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12147 fb_rvalue);
12148 gimple_seq_add_stmt (&new_body,
12149 gimple_build_assign (new_var, x));
12152 else if (is_variable_sized (var))
12154 tree pvar = DECL_VALUE_EXPR (var);
12155 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12156 pvar = TREE_OPERAND (pvar, 0);
12157 gcc_assert (DECL_P (pvar));
12158 tree new_var = lookup_decl (pvar, ctx);
12159 x = build_receiver_ref (var, false, ctx);
12160 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12161 gimple_seq_add_stmt (&new_body,
12162 gimple_build_assign (new_var, x));
12164 break;
12165 case OMP_CLAUSE_PRIVATE:
12166 if (is_gimple_omp_oacc (ctx->stmt))
12167 break;
12168 var = OMP_CLAUSE_DECL (c);
12169 if (omp_is_reference (var))
12171 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12172 tree new_var = lookup_decl (var, ctx);
12173 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12174 if (TREE_CONSTANT (x))
12176 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
12177 get_name (var));
12178 gimple_add_tmp_var (x);
12179 TREE_ADDRESSABLE (x) = 1;
12180 x = build_fold_addr_expr_loc (clause_loc, x);
12182 else
12183 break;
12185 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12186 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12187 gimple_seq_add_stmt (&new_body,
12188 gimple_build_assign (new_var, x));
12190 break;
12191 case OMP_CLAUSE_USE_DEVICE_PTR:
12192 case OMP_CLAUSE_USE_DEVICE_ADDR:
12193 case OMP_CLAUSE_IS_DEVICE_PTR:
12194 tree new_var;
12195 gimple_seq assign_body;
12196 bool is_array_data;
12197 bool do_optional_check;
12198 assign_body = NULL;
12199 do_optional_check = false;
12200 var = OMP_CLAUSE_DECL (c);
12201 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
12203 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12204 x = build_sender_ref (is_array_data
12205 ? (splay_tree_key) &DECL_NAME (var)
12206 : (splay_tree_key) &DECL_UID (var), ctx);
12207 else
12208 x = build_receiver_ref (var, false, ctx);
12210 if (is_array_data)
12212 bool is_ref = omp_is_reference (var);
12213 do_optional_check = true;
12214 /* First, we copy the descriptor data from the host; then
12215 we update its data to point to the target address. */
12216 new_var = lookup_decl (var, ctx);
12217 new_var = DECL_VALUE_EXPR (new_var);
12218 tree v = new_var;
12220 if (is_ref)
12222 var = build_fold_indirect_ref (var);
12223 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
12224 fb_rvalue);
12225 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
12226 gimple_add_tmp_var (v);
12227 TREE_ADDRESSABLE (v) = 1;
12228 gimple_seq_add_stmt (&assign_body,
12229 gimple_build_assign (v, var));
12230 tree rhs = build_fold_addr_expr (v);
12231 gimple_seq_add_stmt (&assign_body,
12232 gimple_build_assign (new_var, rhs));
12234 else
12235 gimple_seq_add_stmt (&assign_body,
12236 gimple_build_assign (new_var, var));
12238 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
12239 gcc_assert (v2);
12240 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12241 gimple_seq_add_stmt (&assign_body,
12242 gimple_build_assign (v2, x));
12244 else if (is_variable_sized (var))
12246 tree pvar = DECL_VALUE_EXPR (var);
12247 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12248 pvar = TREE_OPERAND (pvar, 0);
12249 gcc_assert (DECL_P (pvar));
12250 new_var = lookup_decl (pvar, ctx);
12251 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12252 gimple_seq_add_stmt (&assign_body,
12253 gimple_build_assign (new_var, x));
12255 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12256 && !omp_is_reference (var)
12257 && !omp_is_allocatable_or_ptr (var))
12258 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12260 new_var = lookup_decl (var, ctx);
12261 new_var = DECL_VALUE_EXPR (new_var);
12262 gcc_assert (TREE_CODE (new_var) == MEM_REF);
12263 new_var = TREE_OPERAND (new_var, 0);
12264 gcc_assert (DECL_P (new_var));
12265 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12266 gimple_seq_add_stmt (&assign_body,
12267 gimple_build_assign (new_var, x));
12269 else
12271 tree type = TREE_TYPE (var);
12272 new_var = lookup_decl (var, ctx);
12273 if (omp_is_reference (var))
12275 type = TREE_TYPE (type);
12276 if (TREE_CODE (type) != ARRAY_TYPE
12277 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12278 || (omp_is_reference (var)
12279 && omp_is_allocatable_or_ptr (var))))
12281 tree v = create_tmp_var_raw (type, get_name (var));
12282 gimple_add_tmp_var (v);
12283 TREE_ADDRESSABLE (v) = 1;
12284 x = fold_convert (type, x);
12285 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
12286 fb_rvalue);
12287 gimple_seq_add_stmt (&assign_body,
12288 gimple_build_assign (v, x));
12289 x = build_fold_addr_expr (v);
12290 do_optional_check = true;
12293 new_var = DECL_VALUE_EXPR (new_var);
12294 x = fold_convert (TREE_TYPE (new_var), x);
12295 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12296 gimple_seq_add_stmt (&assign_body,
12297 gimple_build_assign (new_var, x));
12299 tree present;
12300 present = (do_optional_check
12301 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
12302 : NULL_TREE);
12303 if (present)
12305 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12306 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12307 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12308 glabel *null_glabel = gimple_build_label (null_label);
12309 glabel *notnull_glabel = gimple_build_label (notnull_label);
12310 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
12311 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12312 fb_rvalue);
12313 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
12314 fb_rvalue);
12315 gcond *cond = gimple_build_cond_from_tree (present,
12316 notnull_label,
12317 null_label);
12318 gimple_seq_add_stmt (&new_body, cond);
12319 gimple_seq_add_stmt (&new_body, null_glabel);
12320 gimplify_assign (new_var, null_pointer_node, &new_body);
12321 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
12322 gimple_seq_add_stmt (&new_body, notnull_glabel);
12323 gimple_seq_add_seq (&new_body, assign_body);
12324 gimple_seq_add_stmt (&new_body,
12325 gimple_build_label (opt_arg_label));
12327 else
12328 gimple_seq_add_seq (&new_body, assign_body);
12329 break;
12331 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
12332 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
12333 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
12334 or references to VLAs. */
12335 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12336 switch (OMP_CLAUSE_CODE (c))
12338 tree var;
12339 default:
12340 break;
12341 case OMP_CLAUSE_MAP:
12342 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12343 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12345 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12346 poly_int64 offset = 0;
12347 gcc_assert (prev);
12348 var = OMP_CLAUSE_DECL (c);
12349 if (DECL_P (var)
12350 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
12351 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
12352 ctx))
12353 && varpool_node::get_create (var)->offloadable)
12354 break;
12355 if (TREE_CODE (var) == INDIRECT_REF
12356 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
12357 var = TREE_OPERAND (var, 0);
12358 if (TREE_CODE (var) == COMPONENT_REF)
12360 var = get_addr_base_and_unit_offset (var, &offset);
12361 gcc_assert (var != NULL_TREE && DECL_P (var));
12363 else if (DECL_SIZE (var)
12364 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12366 tree var2 = DECL_VALUE_EXPR (var);
12367 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12368 var2 = TREE_OPERAND (var2, 0);
12369 gcc_assert (DECL_P (var2));
12370 var = var2;
12372 tree new_var = lookup_decl (var, ctx), x;
12373 tree type = TREE_TYPE (new_var);
12374 bool is_ref;
12375 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
12376 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12377 == COMPONENT_REF))
12379 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
12380 is_ref = true;
12381 new_var = build2 (MEM_REF, type,
12382 build_fold_addr_expr (new_var),
12383 build_int_cst (build_pointer_type (type),
12384 offset));
12386 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
12388 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
12389 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
12390 new_var = build2 (MEM_REF, type,
12391 build_fold_addr_expr (new_var),
12392 build_int_cst (build_pointer_type (type),
12393 offset));
12395 else
12396 is_ref = omp_is_reference (var);
12397 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12398 is_ref = false;
12399 bool ref_to_array = false;
12400 if (is_ref)
12402 type = TREE_TYPE (type);
12403 if (TREE_CODE (type) == ARRAY_TYPE)
12405 type = build_pointer_type (type);
12406 ref_to_array = true;
12409 else if (TREE_CODE (type) == ARRAY_TYPE)
12411 tree decl2 = DECL_VALUE_EXPR (new_var);
12412 gcc_assert (TREE_CODE (decl2) == MEM_REF);
12413 decl2 = TREE_OPERAND (decl2, 0);
12414 gcc_assert (DECL_P (decl2));
12415 new_var = decl2;
12416 type = TREE_TYPE (new_var);
12418 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
12419 x = fold_convert_loc (clause_loc, type, x);
12420 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
12422 tree bias = OMP_CLAUSE_SIZE (c);
12423 if (DECL_P (bias))
12424 bias = lookup_decl (bias, ctx);
12425 bias = fold_convert_loc (clause_loc, sizetype, bias);
12426 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
12427 bias);
12428 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
12429 TREE_TYPE (x), x, bias);
12431 if (ref_to_array)
12432 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12433 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12434 if (is_ref && !ref_to_array)
12436 tree t = create_tmp_var_raw (type, get_name (var));
12437 gimple_add_tmp_var (t);
12438 TREE_ADDRESSABLE (t) = 1;
12439 gimple_seq_add_stmt (&new_body,
12440 gimple_build_assign (t, x));
12441 x = build_fold_addr_expr_loc (clause_loc, t);
12443 gimple_seq_add_stmt (&new_body,
12444 gimple_build_assign (new_var, x));
12445 prev = NULL_TREE;
12447 else if (OMP_CLAUSE_CHAIN (c)
12448 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
12449 == OMP_CLAUSE_MAP
12450 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12451 == GOMP_MAP_FIRSTPRIVATE_POINTER
12452 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12453 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12454 prev = c;
12455 break;
12456 case OMP_CLAUSE_PRIVATE:
12457 var = OMP_CLAUSE_DECL (c);
12458 if (is_variable_sized (var))
12460 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12461 tree new_var = lookup_decl (var, ctx);
12462 tree pvar = DECL_VALUE_EXPR (var);
12463 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12464 pvar = TREE_OPERAND (pvar, 0);
12465 gcc_assert (DECL_P (pvar));
12466 tree new_pvar = lookup_decl (pvar, ctx);
12467 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12468 tree al = size_int (DECL_ALIGN (var));
12469 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
12470 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12471 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
12472 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12473 gimple_seq_add_stmt (&new_body,
12474 gimple_build_assign (new_pvar, x));
12476 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
12478 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12479 tree new_var = lookup_decl (var, ctx);
12480 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12481 if (TREE_CONSTANT (x))
12482 break;
12483 else
12485 tree atmp
12486 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12487 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
12488 tree al = size_int (TYPE_ALIGN (rtype));
12489 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12492 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12493 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12494 gimple_seq_add_stmt (&new_body,
12495 gimple_build_assign (new_var, x));
12497 break;
12500 gimple_seq fork_seq = NULL;
12501 gimple_seq join_seq = NULL;
12503 if (is_oacc_parallel_or_serial (ctx))
12505 /* If there are reductions on the offloaded region itself, treat
12506 them as a dummy GANG loop. */
12507 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
12509 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
12510 false, NULL, NULL, &fork_seq, &join_seq, ctx);
12513 gimple_seq_add_seq (&new_body, fork_seq);
12514 gimple_seq_add_seq (&new_body, tgt_body);
12515 gimple_seq_add_seq (&new_body, join_seq);
12517 if (offloaded)
12518 new_body = maybe_catch_exception (new_body);
12520 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12521 gimple_omp_set_body (stmt, new_body);
12524 bind = gimple_build_bind (NULL, NULL,
12525 tgt_bind ? gimple_bind_block (tgt_bind)
12526 : NULL_TREE);
12527 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12528 gimple_bind_add_seq (bind, ilist);
12529 gimple_bind_add_stmt (bind, stmt);
12530 gimple_bind_add_seq (bind, olist);
12532 pop_gimplify_context (NULL);
12534 if (dep_bind)
12536 gimple_bind_add_seq (dep_bind, dep_ilist);
12537 gimple_bind_add_stmt (dep_bind, bind);
12538 gimple_bind_add_seq (dep_bind, dep_olist);
12539 pop_gimplify_context (dep_bind);
12543 /* Expand code for an OpenMP teams directive. */
12545 static void
12546 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12548 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
12549 push_gimplify_context ();
12551 tree block = make_node (BLOCK);
12552 gbind *bind = gimple_build_bind (NULL, NULL, block);
12553 gsi_replace (gsi_p, bind, true);
12554 gimple_seq bind_body = NULL;
12555 gimple_seq dlist = NULL;
12556 gimple_seq olist = NULL;
12558 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12559 OMP_CLAUSE_NUM_TEAMS);
12560 if (num_teams == NULL_TREE)
12561 num_teams = build_int_cst (unsigned_type_node, 0);
12562 else
12564 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
12565 num_teams = fold_convert (unsigned_type_node, num_teams);
12566 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
12568 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12569 OMP_CLAUSE_THREAD_LIMIT);
12570 if (thread_limit == NULL_TREE)
12571 thread_limit = build_int_cst (unsigned_type_node, 0);
12572 else
12574 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
12575 thread_limit = fold_convert (unsigned_type_node, thread_limit);
12576 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
12577 fb_rvalue);
12580 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
12581 &bind_body, &dlist, ctx, NULL);
12582 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
12583 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
12584 NULL, ctx);
12585 gimple_seq_add_stmt (&bind_body, teams_stmt);
12587 location_t loc = gimple_location (teams_stmt);
12588 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
12589 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
12590 gimple_set_location (call, loc);
12591 gimple_seq_add_stmt (&bind_body, call);
12593 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
12594 gimple_omp_set_body (teams_stmt, NULL);
12595 gimple_seq_add_seq (&bind_body, olist);
12596 gimple_seq_add_seq (&bind_body, dlist);
12597 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
12598 gimple_bind_set_body (bind, bind_body);
12600 pop_gimplify_context (bind);
12602 gimple_bind_append_vars (bind, ctx->block_vars);
12603 BLOCK_VARS (block) = ctx->block_vars;
12604 if (BLOCK_VARS (block))
12605 TREE_USED (block) = 1;
12608 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
12609 regimplified. If DATA is non-NULL, lower_omp_1 is outside
12610 of OMP context, but with task_shared_vars set. */
12612 static tree
12613 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
12614 void *data)
12616 tree t = *tp;
12618 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
12619 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
12620 return t;
12622 if (task_shared_vars
12623 && DECL_P (t)
12624 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
12625 return t;
12627 /* If a global variable has been privatized, TREE_CONSTANT on
12628 ADDR_EXPR might be wrong. */
12629 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
12630 recompute_tree_invariant_for_addr_expr (t);
12632 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
12633 return NULL_TREE;
12636 /* Data to be communicated between lower_omp_regimplify_operands and
12637 lower_omp_regimplify_operands_p. */
12639 struct lower_omp_regimplify_operands_data
12641 omp_context *ctx;
12642 vec<tree> *decls;
12645 /* Helper function for lower_omp_regimplify_operands. Find
12646 omp_member_access_dummy_var vars and adjust temporarily their
12647 DECL_VALUE_EXPRs if needed. */
12649 static tree
12650 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
12651 void *data)
12653 tree t = omp_member_access_dummy_var (*tp);
12654 if (t)
12656 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12657 lower_omp_regimplify_operands_data *ldata
12658 = (lower_omp_regimplify_operands_data *) wi->info;
12659 tree o = maybe_lookup_decl (t, ldata->ctx);
12660 if (o != t)
12662 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
12663 ldata->decls->safe_push (*tp);
12664 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
12665 SET_DECL_VALUE_EXPR (*tp, v);
12668 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
12669 return NULL_TREE;
12672 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12673 of omp_member_access_dummy_var vars during regimplification. */
12675 static void
12676 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
12677 gimple_stmt_iterator *gsi_p)
12679 auto_vec<tree, 10> decls;
12680 if (ctx)
12682 struct walk_stmt_info wi;
12683 memset (&wi, '\0', sizeof (wi));
12684 struct lower_omp_regimplify_operands_data data;
12685 data.ctx = ctx;
12686 data.decls = &decls;
12687 wi.info = &data;
12688 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
12690 gimple_regimplify_operands (stmt, gsi_p);
12691 while (!decls.is_empty ())
12693 tree t = decls.pop ();
12694 tree v = decls.pop ();
12695 SET_DECL_VALUE_EXPR (t, v);
12699 static void
12700 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12702 gimple *stmt = gsi_stmt (*gsi_p);
12703 struct walk_stmt_info wi;
12704 gcall *call_stmt;
12706 if (gimple_has_location (stmt))
12707 input_location = gimple_location (stmt);
12709 if (task_shared_vars)
12710 memset (&wi, '\0', sizeof (wi));
12712 /* If we have issued syntax errors, avoid doing any heavy lifting.
12713 Just replace the OMP directives with a NOP to avoid
12714 confusing RTL expansion. */
12715 if (seen_error () && is_gimple_omp (stmt))
12717 gsi_replace (gsi_p, gimple_build_nop (), true);
12718 return;
12721 switch (gimple_code (stmt))
12723 case GIMPLE_COND:
12725 gcond *cond_stmt = as_a <gcond *> (stmt);
12726 if ((ctx || task_shared_vars)
12727 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
12728 lower_omp_regimplify_p,
12729 ctx ? NULL : &wi, NULL)
12730 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
12731 lower_omp_regimplify_p,
12732 ctx ? NULL : &wi, NULL)))
12733 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
12735 break;
12736 case GIMPLE_CATCH:
12737 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
12738 break;
12739 case GIMPLE_EH_FILTER:
12740 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
12741 break;
12742 case GIMPLE_TRY:
12743 lower_omp (gimple_try_eval_ptr (stmt), ctx);
12744 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
12745 break;
12746 case GIMPLE_TRANSACTION:
12747 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
12748 ctx);
12749 break;
12750 case GIMPLE_BIND:
12751 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
12752 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
12753 break;
12754 case GIMPLE_OMP_PARALLEL:
12755 case GIMPLE_OMP_TASK:
12756 ctx = maybe_lookup_ctx (stmt);
12757 gcc_assert (ctx);
12758 if (ctx->cancellable)
12759 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12760 lower_omp_taskreg (gsi_p, ctx);
12761 break;
12762 case GIMPLE_OMP_FOR:
12763 ctx = maybe_lookup_ctx (stmt);
12764 gcc_assert (ctx);
12765 if (ctx->cancellable)
12766 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12767 lower_omp_for (gsi_p, ctx);
12768 break;
12769 case GIMPLE_OMP_SECTIONS:
12770 ctx = maybe_lookup_ctx (stmt);
12771 gcc_assert (ctx);
12772 if (ctx->cancellable)
12773 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12774 lower_omp_sections (gsi_p, ctx);
12775 break;
12776 case GIMPLE_OMP_SINGLE:
12777 ctx = maybe_lookup_ctx (stmt);
12778 gcc_assert (ctx);
12779 lower_omp_single (gsi_p, ctx);
12780 break;
12781 case GIMPLE_OMP_MASTER:
12782 ctx = maybe_lookup_ctx (stmt);
12783 gcc_assert (ctx);
12784 lower_omp_master (gsi_p, ctx);
12785 break;
12786 case GIMPLE_OMP_TASKGROUP:
12787 ctx = maybe_lookup_ctx (stmt);
12788 gcc_assert (ctx);
12789 lower_omp_taskgroup (gsi_p, ctx);
12790 break;
12791 case GIMPLE_OMP_ORDERED:
12792 ctx = maybe_lookup_ctx (stmt);
12793 gcc_assert (ctx);
12794 lower_omp_ordered (gsi_p, ctx);
12795 break;
12796 case GIMPLE_OMP_SCAN:
12797 ctx = maybe_lookup_ctx (stmt);
12798 gcc_assert (ctx);
12799 lower_omp_scan (gsi_p, ctx);
12800 break;
12801 case GIMPLE_OMP_CRITICAL:
12802 ctx = maybe_lookup_ctx (stmt);
12803 gcc_assert (ctx);
12804 lower_omp_critical (gsi_p, ctx);
12805 break;
12806 case GIMPLE_OMP_ATOMIC_LOAD:
12807 if ((ctx || task_shared_vars)
12808 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12809 as_a <gomp_atomic_load *> (stmt)),
12810 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
12811 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12812 break;
12813 case GIMPLE_OMP_TARGET:
12814 ctx = maybe_lookup_ctx (stmt);
12815 gcc_assert (ctx);
12816 lower_omp_target (gsi_p, ctx);
12817 break;
12818 case GIMPLE_OMP_TEAMS:
12819 ctx = maybe_lookup_ctx (stmt);
12820 gcc_assert (ctx);
12821 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
12822 lower_omp_taskreg (gsi_p, ctx);
12823 else
12824 lower_omp_teams (gsi_p, ctx);
12825 break;
12826 case GIMPLE_CALL:
12827 tree fndecl;
12828 call_stmt = as_a <gcall *> (stmt);
12829 fndecl = gimple_call_fndecl (call_stmt);
12830 if (fndecl
12831 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
12832 switch (DECL_FUNCTION_CODE (fndecl))
12834 case BUILT_IN_GOMP_BARRIER:
12835 if (ctx == NULL)
12836 break;
12837 /* FALLTHRU */
12838 case BUILT_IN_GOMP_CANCEL:
12839 case BUILT_IN_GOMP_CANCELLATION_POINT:
12840 omp_context *cctx;
12841 cctx = ctx;
12842 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
12843 cctx = cctx->outer;
12844 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
12845 if (!cctx->cancellable)
12847 if (DECL_FUNCTION_CODE (fndecl)
12848 == BUILT_IN_GOMP_CANCELLATION_POINT)
12850 stmt = gimple_build_nop ();
12851 gsi_replace (gsi_p, stmt, false);
12853 break;
12855 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
12857 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
12858 gimple_call_set_fndecl (call_stmt, fndecl);
12859 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
12861 tree lhs;
12862 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
12863 gimple_call_set_lhs (call_stmt, lhs);
12864 tree fallthru_label;
12865 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
12866 gimple *g;
12867 g = gimple_build_label (fallthru_label);
12868 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12869 g = gimple_build_cond (NE_EXPR, lhs,
12870 fold_convert (TREE_TYPE (lhs),
12871 boolean_false_node),
12872 cctx->cancel_label, fallthru_label);
12873 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12874 break;
12875 default:
12876 break;
12878 goto regimplify;
12880 case GIMPLE_ASSIGN:
12881 for (omp_context *up = ctx; up; up = up->outer)
12883 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
12884 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
12885 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
12886 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
12887 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
12888 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
12889 && (gimple_omp_target_kind (up->stmt)
12890 == GF_OMP_TARGET_KIND_DATA)))
12891 continue;
12892 else if (!up->lastprivate_conditional_map)
12893 break;
12894 tree lhs = get_base_address (gimple_assign_lhs (stmt));
12895 if (TREE_CODE (lhs) == MEM_REF
12896 && DECL_P (TREE_OPERAND (lhs, 0))
12897 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
12898 0))) == REFERENCE_TYPE)
12899 lhs = TREE_OPERAND (lhs, 0);
12900 if (DECL_P (lhs))
12901 if (tree *v = up->lastprivate_conditional_map->get (lhs))
12903 tree clauses;
12904 if (up->combined_into_simd_safelen1)
12906 up = up->outer;
12907 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
12908 up = up->outer;
12910 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
12911 clauses = gimple_omp_for_clauses (up->stmt);
12912 else
12913 clauses = gimple_omp_sections_clauses (up->stmt);
12914 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
12915 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
12916 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
12917 OMP_CLAUSE__CONDTEMP_);
12918 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
12919 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
12920 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12923 /* FALLTHRU */
12925 default:
12926 regimplify:
12927 if ((ctx || task_shared_vars)
12928 && walk_gimple_op (stmt, lower_omp_regimplify_p,
12929 ctx ? NULL : &wi))
12931 /* Just remove clobbers, this should happen only if we have
12932 "privatized" local addressable variables in SIMD regions,
12933 the clobber isn't needed in that case and gimplifying address
12934 of the ARRAY_REF into a pointer and creating MEM_REF based
12935 clobber would create worse code than we get with the clobber
12936 dropped. */
12937 if (gimple_clobber_p (stmt))
12939 gsi_replace (gsi_p, gimple_build_nop (), true);
12940 break;
12942 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12944 break;
12948 static void
12949 lower_omp (gimple_seq *body, omp_context *ctx)
12951 location_t saved_location = input_location;
12952 gimple_stmt_iterator gsi;
12953 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12954 lower_omp_1 (&gsi, ctx);
12955 /* During gimplification, we haven't folded statments inside offloading
12956 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
12957 if (target_nesting_level || taskreg_nesting_level)
12958 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12959 fold_stmt (&gsi);
12960 input_location = saved_location;
12963 /* Main entry point. */
12965 static unsigned int
12966 execute_lower_omp (void)
12968 gimple_seq body;
12969 int i;
12970 omp_context *ctx;
12972 /* This pass always runs, to provide PROP_gimple_lomp.
12973 But often, there is nothing to do. */
12974 if (flag_openacc == 0 && flag_openmp == 0
12975 && flag_openmp_simd == 0)
12976 return 0;
12978 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
12979 delete_omp_context);
12981 body = gimple_body (current_function_decl);
12983 scan_omp (&body, NULL);
12984 gcc_assert (taskreg_nesting_level == 0);
12985 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
12986 finish_taskreg_scan (ctx);
12987 taskreg_contexts.release ();
12989 if (all_contexts->root)
12991 if (task_shared_vars)
12992 push_gimplify_context ();
12993 lower_omp (&body, NULL);
12994 if (task_shared_vars)
12995 pop_gimplify_context (NULL);
12998 if (all_contexts)
13000 splay_tree_delete (all_contexts);
13001 all_contexts = NULL;
13003 BITMAP_FREE (task_shared_vars);
13004 BITMAP_FREE (global_nonaddressable_vars);
13006 /* If current function is a method, remove artificial dummy VAR_DECL created
13007 for non-static data member privatization, they aren't needed for
13008 debuginfo nor anything else, have been already replaced everywhere in the
13009 IL and cause problems with LTO. */
13010 if (DECL_ARGUMENTS (current_function_decl)
13011 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
13012 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
13013 == POINTER_TYPE))
13014 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
13015 return 0;
13018 namespace {
13020 const pass_data pass_data_lower_omp =
13022 GIMPLE_PASS, /* type */
13023 "omplower", /* name */
13024 OPTGROUP_OMP, /* optinfo_flags */
13025 TV_NONE, /* tv_id */
13026 PROP_gimple_any, /* properties_required */
13027 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
13028 0, /* properties_destroyed */
13029 0, /* todo_flags_start */
13030 0, /* todo_flags_finish */
13033 class pass_lower_omp : public gimple_opt_pass
13035 public:
13036 pass_lower_omp (gcc::context *ctxt)
13037 : gimple_opt_pass (pass_data_lower_omp, ctxt)
13040 /* opt_pass methods: */
13041 virtual unsigned int execute (function *) { return execute_lower_omp (); }
13043 }; // class pass_lower_omp
13045 } // anon namespace
13047 gimple_opt_pass *
13048 make_pass_lower_omp (gcc::context *ctxt)
13050 return new pass_lower_omp (ctxt);
13053 /* The following is a utility to diagnose structured block violations.
13054 It is not part of the "omplower" pass, as that's invoked too late. It
13055 should be invoked by the respective front ends after gimplification. */
13057 static splay_tree all_labels;
13059 /* Check for mismatched contexts and generate an error if needed. Return
13060 true if an error is detected. */
13062 static bool
13063 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
13064 gimple *branch_ctx, gimple *label_ctx)
13066 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
13067 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
13069 if (label_ctx == branch_ctx)
13070 return false;
13072 const char* kind = NULL;
13074 if (flag_openacc)
13076 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
13077 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
13079 gcc_checking_assert (kind == NULL);
13080 kind = "OpenACC";
13083 if (kind == NULL)
13085 gcc_checking_assert (flag_openmp || flag_openmp_simd);
13086 kind = "OpenMP";
13089 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
13090 so we could traverse it and issue a correct "exit" or "enter" error
13091 message upon a structured block violation.
13093 We built the context by building a list with tree_cons'ing, but there is
13094 no easy counterpart in gimple tuples. It seems like far too much work
13095 for issuing exit/enter error messages. If someone really misses the
13096 distinct error message... patches welcome. */
13098 #if 0
13099 /* Try to avoid confusing the user by producing and error message
13100 with correct "exit" or "enter" verbiage. We prefer "exit"
13101 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
13102 if (branch_ctx == NULL)
13103 exit_p = false;
13104 else
13106 while (label_ctx)
13108 if (TREE_VALUE (label_ctx) == branch_ctx)
13110 exit_p = false;
13111 break;
13113 label_ctx = TREE_CHAIN (label_ctx);
13117 if (exit_p)
13118 error ("invalid exit from %s structured block", kind);
13119 else
13120 error ("invalid entry to %s structured block", kind);
13121 #endif
13123 /* If it's obvious we have an invalid entry, be specific about the error. */
13124 if (branch_ctx == NULL)
13125 error ("invalid entry to %s structured block", kind);
13126 else
13128 /* Otherwise, be vague and lazy, but efficient. */
13129 error ("invalid branch to/from %s structured block", kind);
13132 gsi_replace (gsi_p, gimple_build_nop (), false);
13133 return true;
13136 /* Pass 1: Create a minimal tree of structured blocks, and record
13137 where each label is found. */
13139 static tree
13140 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13141 struct walk_stmt_info *wi)
13143 gimple *context = (gimple *) wi->info;
13144 gimple *inner_context;
13145 gimple *stmt = gsi_stmt (*gsi_p);
13147 *handled_ops_p = true;
13149 switch (gimple_code (stmt))
13151 WALK_SUBSTMTS;
13153 case GIMPLE_OMP_PARALLEL:
13154 case GIMPLE_OMP_TASK:
13155 case GIMPLE_OMP_SECTIONS:
13156 case GIMPLE_OMP_SINGLE:
13157 case GIMPLE_OMP_SECTION:
13158 case GIMPLE_OMP_MASTER:
13159 case GIMPLE_OMP_ORDERED:
13160 case GIMPLE_OMP_SCAN:
13161 case GIMPLE_OMP_CRITICAL:
13162 case GIMPLE_OMP_TARGET:
13163 case GIMPLE_OMP_TEAMS:
13164 case GIMPLE_OMP_TASKGROUP:
13165 /* The minimal context here is just the current OMP construct. */
13166 inner_context = stmt;
13167 wi->info = inner_context;
13168 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13169 wi->info = context;
13170 break;
13172 case GIMPLE_OMP_FOR:
13173 inner_context = stmt;
13174 wi->info = inner_context;
13175 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13176 walk them. */
13177 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
13178 diagnose_sb_1, NULL, wi);
13179 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13180 wi->info = context;
13181 break;
13183 case GIMPLE_LABEL:
13184 splay_tree_insert (all_labels,
13185 (splay_tree_key) gimple_label_label (
13186 as_a <glabel *> (stmt)),
13187 (splay_tree_value) context);
13188 break;
13190 default:
13191 break;
13194 return NULL_TREE;
13197 /* Pass 2: Check each branch and see if its context differs from that of
13198 the destination label's context. */
13200 static tree
13201 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13202 struct walk_stmt_info *wi)
13204 gimple *context = (gimple *) wi->info;
13205 splay_tree_node n;
13206 gimple *stmt = gsi_stmt (*gsi_p);
13208 *handled_ops_p = true;
13210 switch (gimple_code (stmt))
13212 WALK_SUBSTMTS;
13214 case GIMPLE_OMP_PARALLEL:
13215 case GIMPLE_OMP_TASK:
13216 case GIMPLE_OMP_SECTIONS:
13217 case GIMPLE_OMP_SINGLE:
13218 case GIMPLE_OMP_SECTION:
13219 case GIMPLE_OMP_MASTER:
13220 case GIMPLE_OMP_ORDERED:
13221 case GIMPLE_OMP_SCAN:
13222 case GIMPLE_OMP_CRITICAL:
13223 case GIMPLE_OMP_TARGET:
13224 case GIMPLE_OMP_TEAMS:
13225 case GIMPLE_OMP_TASKGROUP:
13226 wi->info = stmt;
13227 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13228 wi->info = context;
13229 break;
13231 case GIMPLE_OMP_FOR:
13232 wi->info = stmt;
13233 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13234 walk them. */
13235 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
13236 diagnose_sb_2, NULL, wi);
13237 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13238 wi->info = context;
13239 break;
13241 case GIMPLE_COND:
13243 gcond *cond_stmt = as_a <gcond *> (stmt);
13244 tree lab = gimple_cond_true_label (cond_stmt);
13245 if (lab)
13247 n = splay_tree_lookup (all_labels,
13248 (splay_tree_key) lab);
13249 diagnose_sb_0 (gsi_p, context,
13250 n ? (gimple *) n->value : NULL);
13252 lab = gimple_cond_false_label (cond_stmt);
13253 if (lab)
13255 n = splay_tree_lookup (all_labels,
13256 (splay_tree_key) lab);
13257 diagnose_sb_0 (gsi_p, context,
13258 n ? (gimple *) n->value : NULL);
13261 break;
13263 case GIMPLE_GOTO:
13265 tree lab = gimple_goto_dest (stmt);
13266 if (TREE_CODE (lab) != LABEL_DECL)
13267 break;
13269 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13270 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
13272 break;
13274 case GIMPLE_SWITCH:
13276 gswitch *switch_stmt = as_a <gswitch *> (stmt);
13277 unsigned int i;
13278 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
13280 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
13281 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13282 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
13283 break;
13286 break;
13288 case GIMPLE_RETURN:
13289 diagnose_sb_0 (gsi_p, context, NULL);
13290 break;
13292 default:
13293 break;
13296 return NULL_TREE;
13299 static unsigned int
13300 diagnose_omp_structured_block_errors (void)
13302 struct walk_stmt_info wi;
13303 gimple_seq body = gimple_body (current_function_decl);
13305 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
13307 memset (&wi, 0, sizeof (wi));
13308 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
13310 memset (&wi, 0, sizeof (wi));
13311 wi.want_locations = true;
13312 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
13314 gimple_set_body (current_function_decl, body);
13316 splay_tree_delete (all_labels);
13317 all_labels = NULL;
13319 return 0;
13322 namespace {
13324 const pass_data pass_data_diagnose_omp_blocks =
13326 GIMPLE_PASS, /* type */
13327 "*diagnose_omp_blocks", /* name */
13328 OPTGROUP_OMP, /* optinfo_flags */
13329 TV_NONE, /* tv_id */
13330 PROP_gimple_any, /* properties_required */
13331 0, /* properties_provided */
13332 0, /* properties_destroyed */
13333 0, /* todo_flags_start */
13334 0, /* todo_flags_finish */
13337 class pass_diagnose_omp_blocks : public gimple_opt_pass
13339 public:
13340 pass_diagnose_omp_blocks (gcc::context *ctxt)
13341 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
13344 /* opt_pass methods: */
13345 virtual bool gate (function *)
13347 return flag_openacc || flag_openmp || flag_openmp_simd;
13349 virtual unsigned int execute (function *)
13351 return diagnose_omp_structured_block_errors ();
13354 }; // class pass_diagnose_omp_blocks
13356 } // anon namespace
13358 gimple_opt_pass *
13359 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
13361 return new pass_diagnose_omp_blocks (ctxt);
13365 #include "gt-omp-low.h"