gcov: make profile merging smarter
[official-gcc.git] / gcc / omp-low.c
blob057b7ae4866ff1b7c0eee58c83354399cb015884
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2021 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "omp-offload.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
78 struct omp_context
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
84 copy_body_data cb;
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context *outer;
88 gimple *stmt;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
92 splay_tree field_map;
93 tree record_type;
94 tree sender_decl;
95 tree receiver_decl;
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map;
103 tree srecord_type;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
107 tree block_vars;
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
111 tree cancel_label;
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
114 otherwise. */
115 gimple *simt_stmt;
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec<tree> task_reductions;
122 /* A hash map from the reduction clauses to the registered array
123 elts. */
124 hash_map<tree, unsigned> *task_reduction_map;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map<tree, tree> *lastprivate_conditional_map;
130 /* And a hash map from the allocate variables to their corresponding
131 allocators. */
132 hash_map<tree, tree> *allocate_map;
134 /* A tree_list of the reduction clauses in this context. This is
135 only used for checking the consistency of OpenACC reduction
136 clauses in scan_omp_for and is not guaranteed to contain a valid
137 value outside of this function. */
138 tree local_reduction_clauses;
140 /* A tree_list of the reduction clauses in outer contexts. This is
141 only used for checking the consistency of OpenACC reduction
142 clauses in scan_omp_for and is not guaranteed to contain a valid
143 value outside of this function. */
144 tree outer_reduction_clauses;
146 /* Nesting depth of this context. Used to beautify error messages re
147 invalid gotos. The outermost ctx is depth 1, with depth 0 being
148 reserved for the main body of the function. */
149 int depth;
151 /* True if this parallel directive is nested within another. */
152 bool is_nested;
154 /* True if this construct can be cancelled. */
155 bool cancellable;
157 /* True if lower_omp_1 should look up lastprivate conditional in parent
158 context. */
159 bool combined_into_simd_safelen1;
161 /* True if there is nested scan context with inclusive clause. */
162 bool scan_inclusive;
164 /* True if there is nested scan context with exclusive clause. */
165 bool scan_exclusive;
167 /* True in the second simd loop of for simd with inscan reductions. */
168 bool for_simd_scan_phase;
170 /* True if there is order(concurrent) clause on the construct. */
171 bool order_concurrent;
173 /* True if there is bind clause on the construct (i.e. a loop construct). */
174 bool loop_p;
176 /* Only used for omp target contexts. True if a teams construct is
177 strictly nested in it. */
178 bool teams_nested_p;
180 /* Only used for omp target contexts. True if an OpenMP construct other
181 than teams is strictly nested in it. */
182 bool nonteams_nested_p;
184 /* Candidates for adjusting OpenACC privatization level. */
185 vec<tree> oacc_privatization_candidates;
188 static splay_tree all_contexts;
189 static int taskreg_nesting_level;
190 static int target_nesting_level;
191 static bitmap task_shared_vars;
192 static bitmap global_nonaddressable_vars;
193 static vec<omp_context *> taskreg_contexts;
195 static void scan_omp (gimple_seq *, omp_context *);
196 static tree scan_omp_1_op (tree *, int *, void *);
198 #define WALK_SUBSTMTS \
199 case GIMPLE_BIND: \
200 case GIMPLE_TRY: \
201 case GIMPLE_CATCH: \
202 case GIMPLE_EH_FILTER: \
203 case GIMPLE_TRANSACTION: \
204 /* The sub-statements for these should be walked. */ \
205 *handled_ops_p = false; \
206 break;
208 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
209 (This doesn't include OpenACC 'kernels' decomposed parts.) */
211 static bool
212 is_oacc_parallel_or_serial (omp_context *ctx)
214 enum gimple_code outer_type = gimple_code (ctx->stmt);
215 return ((outer_type == GIMPLE_OMP_TARGET)
216 && ((gimple_omp_target_kind (ctx->stmt)
217 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
218 || (gimple_omp_target_kind (ctx->stmt)
219 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
222 /* Return whether CTX represents an OpenACC 'kernels' construct.
223 (This doesn't include OpenACC 'kernels' decomposed parts.) */
225 static bool
226 is_oacc_kernels (omp_context *ctx)
228 enum gimple_code outer_type = gimple_code (ctx->stmt);
229 return ((outer_type == GIMPLE_OMP_TARGET)
230 && (gimple_omp_target_kind (ctx->stmt)
231 == GF_OMP_TARGET_KIND_OACC_KERNELS));
234 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
236 static bool
237 is_oacc_kernels_decomposed_part (omp_context *ctx)
239 enum gimple_code outer_type = gimple_code (ctx->stmt);
240 return ((outer_type == GIMPLE_OMP_TARGET)
241 && ((gimple_omp_target_kind (ctx->stmt)
242 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
243 || (gimple_omp_target_kind (ctx->stmt)
244 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
245 || (gimple_omp_target_kind (ctx->stmt)
246 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
249 /* Return true if STMT corresponds to an OpenMP target region. */
250 static bool
251 is_omp_target (gimple *stmt)
253 if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
255 int kind = gimple_omp_target_kind (stmt);
256 return (kind == GF_OMP_TARGET_KIND_REGION
257 || kind == GF_OMP_TARGET_KIND_DATA
258 || kind == GF_OMP_TARGET_KIND_ENTER_DATA
259 || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
261 return false;
264 /* If DECL is the artificial dummy VAR_DECL created for non-static
265 data member privatization, return the underlying "this" parameter,
266 otherwise return NULL. */
268 tree
269 omp_member_access_dummy_var (tree decl)
271 if (!VAR_P (decl)
272 || !DECL_ARTIFICIAL (decl)
273 || !DECL_IGNORED_P (decl)
274 || !DECL_HAS_VALUE_EXPR_P (decl)
275 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
276 return NULL_TREE;
278 tree v = DECL_VALUE_EXPR (decl);
279 if (TREE_CODE (v) != COMPONENT_REF)
280 return NULL_TREE;
282 while (1)
283 switch (TREE_CODE (v))
285 case COMPONENT_REF:
286 case MEM_REF:
287 case INDIRECT_REF:
288 CASE_CONVERT:
289 case POINTER_PLUS_EXPR:
290 v = TREE_OPERAND (v, 0);
291 continue;
292 case PARM_DECL:
293 if (DECL_CONTEXT (v) == current_function_decl
294 && DECL_ARTIFICIAL (v)
295 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
296 return v;
297 return NULL_TREE;
298 default:
299 return NULL_TREE;
303 /* Helper for unshare_and_remap, called through walk_tree. */
305 static tree
306 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
308 tree *pair = (tree *) data;
309 if (*tp == pair[0])
311 *tp = unshare_expr (pair[1]);
312 *walk_subtrees = 0;
314 else if (IS_TYPE_OR_DECL_P (*tp))
315 *walk_subtrees = 0;
316 return NULL_TREE;
319 /* Return unshare_expr (X) with all occurrences of FROM
320 replaced with TO. */
322 static tree
323 unshare_and_remap (tree x, tree from, tree to)
325 tree pair[2] = { from, to };
326 x = unshare_expr (x);
327 walk_tree (&x, unshare_and_remap_1, pair, NULL);
328 return x;
331 /* Convenience function for calling scan_omp_1_op on tree operands. */
333 static inline tree
334 scan_omp_op (tree *tp, omp_context *ctx)
336 struct walk_stmt_info wi;
338 memset (&wi, 0, sizeof (wi));
339 wi.info = ctx;
340 wi.want_locations = true;
342 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
345 static void lower_omp (gimple_seq *, omp_context *);
346 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
347 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
349 /* Return true if CTX is for an omp parallel. */
351 static inline bool
352 is_parallel_ctx (omp_context *ctx)
354 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
358 /* Return true if CTX is for an omp task. */
360 static inline bool
361 is_task_ctx (omp_context *ctx)
363 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
367 /* Return true if CTX is for an omp taskloop. */
369 static inline bool
370 is_taskloop_ctx (omp_context *ctx)
372 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
373 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
377 /* Return true if CTX is for a host omp teams. */
379 static inline bool
380 is_host_teams_ctx (omp_context *ctx)
382 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
383 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
386 /* Return true if CTX is for an omp parallel or omp task or host omp teams
387 (the last one is strictly not a task region in OpenMP speak, but we
388 need to treat it similarly). */
390 static inline bool
391 is_taskreg_ctx (omp_context *ctx)
393 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
396 /* Return true if EXPR is variable sized. */
398 static inline bool
399 is_variable_sized (const_tree expr)
401 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
404 /* Lookup variables. The "maybe" form
405 allows for the variable form to not have been entered, otherwise we
406 assert that the variable must have been entered. */
408 static inline tree
409 lookup_decl (tree var, omp_context *ctx)
411 tree *n = ctx->cb.decl_map->get (var);
412 return *n;
415 static inline tree
416 maybe_lookup_decl (const_tree var, omp_context *ctx)
418 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
419 return n ? *n : NULL_TREE;
422 static inline tree
423 lookup_field (tree var, omp_context *ctx)
425 splay_tree_node n;
426 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
427 return (tree) n->value;
430 static inline tree
431 lookup_sfield (splay_tree_key key, omp_context *ctx)
433 splay_tree_node n;
434 n = splay_tree_lookup (ctx->sfield_map
435 ? ctx->sfield_map : ctx->field_map, key);
436 return (tree) n->value;
439 static inline tree
440 lookup_sfield (tree var, omp_context *ctx)
442 return lookup_sfield ((splay_tree_key) var, ctx);
445 static inline tree
446 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
448 splay_tree_node n;
449 n = splay_tree_lookup (ctx->field_map, key);
450 return n ? (tree) n->value : NULL_TREE;
453 static inline tree
454 maybe_lookup_field (tree var, omp_context *ctx)
456 return maybe_lookup_field ((splay_tree_key) var, ctx);
459 /* Return true if DECL should be copied by pointer. SHARED_CTX is
460 the parallel context if DECL is to be shared. */
462 static bool
463 use_pointer_for_field (tree decl, omp_context *shared_ctx)
465 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
466 || TYPE_ATOMIC (TREE_TYPE (decl)))
467 return true;
469 /* We can only use copy-in/copy-out semantics for shared variables
470 when we know the value is not accessible from an outer scope. */
471 if (shared_ctx)
473 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
475 /* ??? Trivially accessible from anywhere. But why would we even
476 be passing an address in this case? Should we simply assert
477 this to be false, or should we have a cleanup pass that removes
478 these from the list of mappings? */
479 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
480 return true;
482 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
483 without analyzing the expression whether or not its location
484 is accessible to anyone else. In the case of nested parallel
485 regions it certainly may be. */
486 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
487 return true;
489 /* Do not use copy-in/copy-out for variables that have their
490 address taken. */
491 if (is_global_var (decl))
493 /* For file scope vars, track whether we've seen them as
494 non-addressable initially and in that case, keep the same
495 answer for the duration of the pass, even when they are made
496 addressable later on e.g. through reduction expansion. Global
497 variables which weren't addressable before the pass will not
498 have their privatized copies address taken. See PR91216. */
499 if (!TREE_ADDRESSABLE (decl))
501 if (!global_nonaddressable_vars)
502 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
503 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
505 else if (!global_nonaddressable_vars
506 || !bitmap_bit_p (global_nonaddressable_vars,
507 DECL_UID (decl)))
508 return true;
510 else if (TREE_ADDRESSABLE (decl))
511 return true;
513 /* lower_send_shared_vars only uses copy-in, but not copy-out
514 for these. */
515 if (TREE_READONLY (decl)
516 || ((TREE_CODE (decl) == RESULT_DECL
517 || TREE_CODE (decl) == PARM_DECL)
518 && DECL_BY_REFERENCE (decl)))
519 return false;
521 /* Disallow copy-in/out in nested parallel if
522 decl is shared in outer parallel, otherwise
523 each thread could store the shared variable
524 in its own copy-in location, making the
525 variable no longer really shared. */
526 if (shared_ctx->is_nested)
528 omp_context *up;
530 for (up = shared_ctx->outer; up; up = up->outer)
531 if ((is_taskreg_ctx (up)
532 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
533 && is_gimple_omp_offloaded (up->stmt)))
534 && maybe_lookup_decl (decl, up))
535 break;
537 if (up)
539 tree c;
541 if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
543 for (c = gimple_omp_target_clauses (up->stmt);
544 c; c = OMP_CLAUSE_CHAIN (c))
545 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
546 && OMP_CLAUSE_DECL (c) == decl)
547 break;
549 else
550 for (c = gimple_omp_taskreg_clauses (up->stmt);
551 c; c = OMP_CLAUSE_CHAIN (c))
552 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
553 && OMP_CLAUSE_DECL (c) == decl)
554 break;
556 if (c)
557 goto maybe_mark_addressable_and_ret;
561 /* For tasks avoid using copy-in/out. As tasks can be
562 deferred or executed in different thread, when GOMP_task
563 returns, the task hasn't necessarily terminated. */
564 if (is_task_ctx (shared_ctx))
566 tree outer;
567 maybe_mark_addressable_and_ret:
568 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
569 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
571 /* Taking address of OUTER in lower_send_shared_vars
572 might need regimplification of everything that uses the
573 variable. */
574 if (!task_shared_vars)
575 task_shared_vars = BITMAP_ALLOC (NULL);
576 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
577 TREE_ADDRESSABLE (outer) = 1;
579 return true;
583 return false;
586 /* Construct a new automatic decl similar to VAR. */
588 static tree
589 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
591 tree copy = copy_var_decl (var, name, type);
593 DECL_CONTEXT (copy) = current_function_decl;
594 DECL_CHAIN (copy) = ctx->block_vars;
595 /* If VAR is listed in task_shared_vars, it means it wasn't
596 originally addressable and is just because task needs to take
597 it's address. But we don't need to take address of privatizations
598 from that var. */
599 if (TREE_ADDRESSABLE (var)
600 && ((task_shared_vars
601 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
602 || (global_nonaddressable_vars
603 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
604 TREE_ADDRESSABLE (copy) = 0;
605 ctx->block_vars = copy;
607 return copy;
610 static tree
611 omp_copy_decl_1 (tree var, omp_context *ctx)
613 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
616 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
617 as appropriate. */
618 /* See also 'gcc/omp-oacc-neuter-broadcast.cc:oacc_build_component_ref'. */
620 static tree
621 omp_build_component_ref (tree obj, tree field)
623 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
624 if (TREE_THIS_VOLATILE (field))
625 TREE_THIS_VOLATILE (ret) |= 1;
626 if (TREE_READONLY (field))
627 TREE_READONLY (ret) |= 1;
628 return ret;
631 /* Build tree nodes to access the field for VAR on the receiver side. */
633 static tree
634 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
636 tree x, field = lookup_field (var, ctx);
638 /* If the receiver record type was remapped in the child function,
639 remap the field into the new record type. */
640 x = maybe_lookup_field (field, ctx);
641 if (x != NULL)
642 field = x;
644 x = build_simple_mem_ref (ctx->receiver_decl);
645 TREE_THIS_NOTRAP (x) = 1;
646 x = omp_build_component_ref (x, field);
647 if (by_ref)
649 x = build_simple_mem_ref (x);
650 TREE_THIS_NOTRAP (x) = 1;
653 return x;
656 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
657 of a parallel, this is a component reference; for workshare constructs
658 this is some variable. */
660 static tree
661 build_outer_var_ref (tree var, omp_context *ctx,
662 enum omp_clause_code code = OMP_CLAUSE_ERROR)
664 tree x;
665 omp_context *outer = ctx->outer;
666 for (; outer; outer = outer->outer)
668 if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
669 continue;
670 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
671 && !maybe_lookup_decl (var, outer))
672 continue;
673 break;
676 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
677 x = var;
678 else if (is_variable_sized (var))
680 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
681 x = build_outer_var_ref (x, ctx, code);
682 x = build_simple_mem_ref (x);
684 else if (is_taskreg_ctx (ctx))
686 bool by_ref = use_pointer_for_field (var, NULL);
687 x = build_receiver_ref (var, by_ref, ctx);
689 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
690 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
691 || ctx->loop_p
692 || (code == OMP_CLAUSE_PRIVATE
693 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
694 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
695 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
697 /* #pragma omp simd isn't a worksharing construct, and can reference
698 even private vars in its linear etc. clauses.
699 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
700 to private vars in all worksharing constructs. */
701 x = NULL_TREE;
702 if (outer && is_taskreg_ctx (outer))
703 x = lookup_decl (var, outer);
704 else if (outer)
705 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
706 if (x == NULL_TREE)
707 x = var;
709 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
711 gcc_assert (outer);
712 splay_tree_node n
713 = splay_tree_lookup (outer->field_map,
714 (splay_tree_key) &DECL_UID (var));
715 if (n == NULL)
717 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
718 x = var;
719 else
720 x = lookup_decl (var, outer);
722 else
724 tree field = (tree) n->value;
725 /* If the receiver record type was remapped in the child function,
726 remap the field into the new record type. */
727 x = maybe_lookup_field (field, outer);
728 if (x != NULL)
729 field = x;
731 x = build_simple_mem_ref (outer->receiver_decl);
732 x = omp_build_component_ref (x, field);
733 if (use_pointer_for_field (var, outer))
734 x = build_simple_mem_ref (x);
737 else if (outer)
738 x = lookup_decl (var, outer);
739 else if (omp_privatize_by_reference (var))
740 /* This can happen with orphaned constructs. If var is reference, it is
741 possible it is shared and as such valid. */
742 x = var;
743 else if (omp_member_access_dummy_var (var))
744 x = var;
745 else
746 gcc_unreachable ();
748 if (x == var)
750 tree t = omp_member_access_dummy_var (var);
751 if (t)
753 x = DECL_VALUE_EXPR (var);
754 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
755 if (o != t)
756 x = unshare_and_remap (x, t, o);
757 else
758 x = unshare_expr (x);
762 if (omp_privatize_by_reference (var))
763 x = build_simple_mem_ref (x);
765 return x;
768 /* Build tree nodes to access the field for VAR on the sender side. */
770 static tree
771 build_sender_ref (splay_tree_key key, omp_context *ctx)
773 tree field = lookup_sfield (key, ctx);
774 return omp_build_component_ref (ctx->sender_decl, field);
777 static tree
778 build_sender_ref (tree var, omp_context *ctx)
780 return build_sender_ref ((splay_tree_key) var, ctx);
783 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
784 BASE_POINTERS_RESTRICT, declare the field with restrict. */
786 static void
787 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
789 tree field, type, sfield = NULL_TREE;
790 splay_tree_key key = (splay_tree_key) var;
792 if ((mask & 16) != 0)
794 key = (splay_tree_key) &DECL_NAME (var);
795 gcc_checking_assert (key != (splay_tree_key) var);
797 if ((mask & 8) != 0)
799 key = (splay_tree_key) &DECL_UID (var);
800 gcc_checking_assert (key != (splay_tree_key) var);
802 gcc_assert ((mask & 1) == 0
803 || !splay_tree_lookup (ctx->field_map, key));
804 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
805 || !splay_tree_lookup (ctx->sfield_map, key));
806 gcc_assert ((mask & 3) == 3
807 || !is_gimple_omp_oacc (ctx->stmt));
809 type = TREE_TYPE (var);
810 if ((mask & 16) != 0)
811 type = lang_hooks.decls.omp_array_data (var, true);
813 /* Prevent redeclaring the var in the split-off function with a restrict
814 pointer type. Note that we only clear type itself, restrict qualifiers in
815 the pointed-to type will be ignored by points-to analysis. */
816 if (POINTER_TYPE_P (type)
817 && TYPE_RESTRICT (type))
818 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
820 if (mask & 4)
822 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
823 type = build_pointer_type (build_pointer_type (type));
825 else if (by_ref)
826 type = build_pointer_type (type);
827 else if ((mask & (32 | 3)) == 1
828 && omp_privatize_by_reference (var))
829 type = TREE_TYPE (type);
831 field = build_decl (DECL_SOURCE_LOCATION (var),
832 FIELD_DECL, DECL_NAME (var), type);
834 /* Remember what variable this field was created for. This does have a
835 side effect of making dwarf2out ignore this member, so for helpful
836 debugging we clear it later in delete_omp_context. */
837 DECL_ABSTRACT_ORIGIN (field) = var;
838 if ((mask & 16) == 0 && type == TREE_TYPE (var))
840 SET_DECL_ALIGN (field, DECL_ALIGN (var));
841 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
842 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
844 else
845 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
847 if ((mask & 3) == 3)
849 insert_field_into_struct (ctx->record_type, field);
850 if (ctx->srecord_type)
852 sfield = build_decl (DECL_SOURCE_LOCATION (var),
853 FIELD_DECL, DECL_NAME (var), type);
854 DECL_ABSTRACT_ORIGIN (sfield) = var;
855 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
856 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
857 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
858 insert_field_into_struct (ctx->srecord_type, sfield);
861 else
863 if (ctx->srecord_type == NULL_TREE)
865 tree t;
867 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
868 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
869 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
871 sfield = build_decl (DECL_SOURCE_LOCATION (t),
872 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
873 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
874 insert_field_into_struct (ctx->srecord_type, sfield);
875 splay_tree_insert (ctx->sfield_map,
876 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
877 (splay_tree_value) sfield);
880 sfield = field;
881 insert_field_into_struct ((mask & 1) ? ctx->record_type
882 : ctx->srecord_type, field);
885 if (mask & 1)
886 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
887 if ((mask & 2) && ctx->sfield_map)
888 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
891 static tree
892 install_var_local (tree var, omp_context *ctx)
894 tree new_var = omp_copy_decl_1 (var, ctx);
895 insert_decl_map (&ctx->cb, var, new_var);
896 return new_var;
899 /* Adjust the replacement for DECL in CTX for the new context. This means
900 copying the DECL_VALUE_EXPR, and fixing up the type. */
902 static void
903 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
905 tree new_decl, size;
907 new_decl = lookup_decl (decl, ctx);
909 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
911 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
912 && DECL_HAS_VALUE_EXPR_P (decl))
914 tree ve = DECL_VALUE_EXPR (decl);
915 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
916 SET_DECL_VALUE_EXPR (new_decl, ve);
917 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
920 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
922 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
923 if (size == error_mark_node)
924 size = TYPE_SIZE (TREE_TYPE (new_decl));
925 DECL_SIZE (new_decl) = size;
927 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
928 if (size == error_mark_node)
929 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
930 DECL_SIZE_UNIT (new_decl) = size;
934 /* The callback for remap_decl. Search all containing contexts for a
935 mapping of the variable; this avoids having to duplicate the splay
936 tree ahead of time. We know a mapping doesn't already exist in the
937 given context. Create new mappings to implement default semantics. */
939 static tree
940 omp_copy_decl (tree var, copy_body_data *cb)
942 omp_context *ctx = (omp_context *) cb;
943 tree new_var;
945 if (TREE_CODE (var) == LABEL_DECL)
947 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
948 return var;
949 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
950 DECL_CONTEXT (new_var) = current_function_decl;
951 insert_decl_map (&ctx->cb, var, new_var);
952 return new_var;
955 while (!is_taskreg_ctx (ctx))
957 ctx = ctx->outer;
958 if (ctx == NULL)
959 return var;
960 new_var = maybe_lookup_decl (var, ctx);
961 if (new_var)
962 return new_var;
965 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
966 return var;
968 return error_mark_node;
971 /* Create a new context, with OUTER_CTX being the surrounding context. */
973 static omp_context *
974 new_omp_context (gimple *stmt, omp_context *outer_ctx)
976 omp_context *ctx = XCNEW (omp_context);
978 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
979 (splay_tree_value) ctx);
980 ctx->stmt = stmt;
982 if (outer_ctx)
984 ctx->outer = outer_ctx;
985 ctx->cb = outer_ctx->cb;
986 ctx->cb.block = NULL;
987 ctx->depth = outer_ctx->depth + 1;
989 else
991 ctx->cb.src_fn = current_function_decl;
992 ctx->cb.dst_fn = current_function_decl;
993 ctx->cb.src_node = cgraph_node::get (current_function_decl);
994 gcc_checking_assert (ctx->cb.src_node);
995 ctx->cb.dst_node = ctx->cb.src_node;
996 ctx->cb.src_cfun = cfun;
997 ctx->cb.copy_decl = omp_copy_decl;
998 ctx->cb.eh_lp_nr = 0;
999 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
1000 ctx->cb.adjust_array_error_bounds = true;
1001 ctx->cb.dont_remap_vla_if_no_change = true;
1002 ctx->depth = 1;
1005 ctx->cb.decl_map = new hash_map<tree, tree>;
1007 return ctx;
1010 static gimple_seq maybe_catch_exception (gimple_seq);
1012 /* Finalize task copyfn. */
1014 static void
1015 finalize_task_copyfn (gomp_task *task_stmt)
1017 struct function *child_cfun;
1018 tree child_fn;
1019 gimple_seq seq = NULL, new_seq;
1020 gbind *bind;
1022 child_fn = gimple_omp_task_copy_fn (task_stmt);
1023 if (child_fn == NULL_TREE)
1024 return;
1026 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
1027 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
1029 push_cfun (child_cfun);
1030 bind = gimplify_body (child_fn, false);
1031 gimple_seq_add_stmt (&seq, bind);
1032 new_seq = maybe_catch_exception (seq);
1033 if (new_seq != seq)
1035 bind = gimple_build_bind (NULL, new_seq, NULL);
1036 seq = NULL;
1037 gimple_seq_add_stmt (&seq, bind);
1039 gimple_set_body (child_fn, seq);
1040 pop_cfun ();
1042 /* Inform the callgraph about the new function. */
1043 cgraph_node *node = cgraph_node::get_create (child_fn);
1044 node->parallelized_function = 1;
1045 cgraph_node::add_new_function (child_fn, false);
1048 /* Destroy a omp_context data structures. Called through the splay tree
1049 value delete callback. */
1051 static void
1052 delete_omp_context (splay_tree_value value)
1054 omp_context *ctx = (omp_context *) value;
1056 delete ctx->cb.decl_map;
1058 if (ctx->field_map)
1059 splay_tree_delete (ctx->field_map);
1060 if (ctx->sfield_map)
1061 splay_tree_delete (ctx->sfield_map);
1063 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1064 it produces corrupt debug information. */
1065 if (ctx->record_type)
1067 tree t;
1068 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1069 DECL_ABSTRACT_ORIGIN (t) = NULL;
1071 if (ctx->srecord_type)
1073 tree t;
1074 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1075 DECL_ABSTRACT_ORIGIN (t) = NULL;
1078 if (is_task_ctx (ctx))
1079 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
1081 if (ctx->task_reduction_map)
1083 ctx->task_reductions.release ();
1084 delete ctx->task_reduction_map;
1087 delete ctx->lastprivate_conditional_map;
1088 delete ctx->allocate_map;
1090 XDELETE (ctx);
1093 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1094 context. */
1096 static void
1097 fixup_child_record_type (omp_context *ctx)
1099 tree f, type = ctx->record_type;
1101 if (!ctx->receiver_decl)
1102 return;
1103 /* ??? It isn't sufficient to just call remap_type here, because
1104 variably_modified_type_p doesn't work the way we expect for
1105 record types. Testing each field for whether it needs remapping
1106 and creating a new record by hand works, however. */
1107 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1108 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1109 break;
1110 if (f)
1112 tree name, new_fields = NULL;
1114 type = lang_hooks.types.make_type (RECORD_TYPE);
1115 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1116 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1117 TYPE_DECL, name, type);
1118 TYPE_NAME (type) = name;
1120 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1122 tree new_f = copy_node (f);
1123 DECL_CONTEXT (new_f) = type;
1124 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1125 DECL_CHAIN (new_f) = new_fields;
1126 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1127 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1128 &ctx->cb, NULL);
1129 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1130 &ctx->cb, NULL);
1131 new_fields = new_f;
1133 /* Arrange to be able to look up the receiver field
1134 given the sender field. */
1135 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1136 (splay_tree_value) new_f);
1138 TYPE_FIELDS (type) = nreverse (new_fields);
1139 layout_type (type);
1142 /* In a target region we never modify any of the pointers in *.omp_data_i,
1143 so attempt to help the optimizers. */
1144 if (is_gimple_omp_offloaded (ctx->stmt))
1145 type = build_qualified_type (type, TYPE_QUAL_CONST);
1147 TREE_TYPE (ctx->receiver_decl)
1148 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1151 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1152 specified by CLAUSES. */
1154 static void
1155 scan_sharing_clauses (tree clauses, omp_context *ctx)
1157 tree c, decl;
1158 bool scan_array_reductions = false;
1160 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1161 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
1162 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1163 /* omp_default_mem_alloc is 1 */
1164 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1165 || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
1167 if (ctx->allocate_map == NULL)
1168 ctx->allocate_map = new hash_map<tree, tree>;
1169 tree val = integer_zero_node;
1170 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1171 val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
1172 if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
1173 val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
1174 ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
1177 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1179 bool by_ref;
1181 switch (OMP_CLAUSE_CODE (c))
1183 case OMP_CLAUSE_PRIVATE:
1184 decl = OMP_CLAUSE_DECL (c);
1185 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1186 goto do_private;
1187 else if (!is_variable_sized (decl))
1188 install_var_local (decl, ctx);
1189 break;
1191 case OMP_CLAUSE_SHARED:
1192 decl = OMP_CLAUSE_DECL (c);
1193 if (ctx->allocate_map && ctx->allocate_map->get (decl))
1194 ctx->allocate_map->remove (decl);
1195 /* Ignore shared directives in teams construct inside of
1196 target construct. */
1197 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1198 && !is_host_teams_ctx (ctx))
1200 /* Global variables don't need to be copied,
1201 the receiver side will use them directly. */
1202 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1203 if (is_global_var (odecl))
1204 break;
1205 insert_decl_map (&ctx->cb, decl, odecl);
1206 break;
1208 gcc_assert (is_taskreg_ctx (ctx));
1209 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1210 || !is_variable_sized (decl));
1211 /* Global variables don't need to be copied,
1212 the receiver side will use them directly. */
1213 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1214 break;
1215 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1217 use_pointer_for_field (decl, ctx);
1218 break;
1220 by_ref = use_pointer_for_field (decl, NULL);
1221 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1222 || TREE_ADDRESSABLE (decl)
1223 || by_ref
1224 || omp_privatize_by_reference (decl))
1226 by_ref = use_pointer_for_field (decl, ctx);
1227 install_var_field (decl, by_ref, 3, ctx);
1228 install_var_local (decl, ctx);
1229 break;
1231 /* We don't need to copy const scalar vars back. */
1232 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1233 goto do_private;
1235 case OMP_CLAUSE_REDUCTION:
1236 /* Collect 'reduction' clauses on OpenACC compute construct. */
1237 if (is_gimple_omp_oacc (ctx->stmt)
1238 && is_gimple_omp_offloaded (ctx->stmt))
1240 /* No 'reduction' clauses on OpenACC 'kernels'. */
1241 gcc_checking_assert (!is_oacc_kernels (ctx));
1242 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1243 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
1245 ctx->local_reduction_clauses
1246 = tree_cons (NULL, c, ctx->local_reduction_clauses);
1248 /* FALLTHRU */
1250 case OMP_CLAUSE_IN_REDUCTION:
1251 decl = OMP_CLAUSE_DECL (c);
1252 if (ctx->allocate_map
1253 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1254 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
1255 || OMP_CLAUSE_REDUCTION_TASK (c)))
1256 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1257 || is_task_ctx (ctx)))
1259 /* For now. */
1260 if (ctx->allocate_map->get (decl))
1261 ctx->allocate_map->remove (decl);
1263 if (TREE_CODE (decl) == MEM_REF)
1265 tree t = TREE_OPERAND (decl, 0);
1266 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1267 t = TREE_OPERAND (t, 0);
1268 if (TREE_CODE (t) == INDIRECT_REF
1269 || TREE_CODE (t) == ADDR_EXPR)
1270 t = TREE_OPERAND (t, 0);
1271 if (is_omp_target (ctx->stmt))
1273 if (is_variable_sized (t))
1275 gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
1276 t = DECL_VALUE_EXPR (t);
1277 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
1278 t = TREE_OPERAND (t, 0);
1279 gcc_assert (DECL_P (t));
1281 tree at = t;
1282 if (ctx->outer)
1283 scan_omp_op (&at, ctx->outer);
1284 tree nt = omp_copy_decl_1 (at, ctx);
1285 splay_tree_insert (ctx->field_map,
1286 (splay_tree_key) &DECL_CONTEXT (t),
1287 (splay_tree_value) nt);
1288 if (at != t)
1289 splay_tree_insert (ctx->field_map,
1290 (splay_tree_key) &DECL_CONTEXT (at),
1291 (splay_tree_value) nt);
1292 break;
1294 install_var_local (t, ctx);
1295 if (is_taskreg_ctx (ctx)
1296 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1297 || (is_task_ctx (ctx)
1298 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1299 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1300 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1301 == POINTER_TYPE)))))
1302 && !is_variable_sized (t)
1303 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1304 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1305 && !is_task_ctx (ctx))))
1307 by_ref = use_pointer_for_field (t, NULL);
1308 if (is_task_ctx (ctx)
1309 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1310 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1312 install_var_field (t, false, 1, ctx);
1313 install_var_field (t, by_ref, 2, ctx);
1315 else
1316 install_var_field (t, by_ref, 3, ctx);
1318 break;
1320 if (is_omp_target (ctx->stmt))
1322 tree at = decl;
1323 if (ctx->outer)
1324 scan_omp_op (&at, ctx->outer);
1325 tree nt = omp_copy_decl_1 (at, ctx);
1326 splay_tree_insert (ctx->field_map,
1327 (splay_tree_key) &DECL_CONTEXT (decl),
1328 (splay_tree_value) nt);
1329 if (at != decl)
1330 splay_tree_insert (ctx->field_map,
1331 (splay_tree_key) &DECL_CONTEXT (at),
1332 (splay_tree_value) nt);
1333 break;
1335 if (is_task_ctx (ctx)
1336 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1337 && OMP_CLAUSE_REDUCTION_TASK (c)
1338 && is_parallel_ctx (ctx)))
1340 /* Global variables don't need to be copied,
1341 the receiver side will use them directly. */
1342 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1344 by_ref = use_pointer_for_field (decl, ctx);
1345 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1346 install_var_field (decl, by_ref, 3, ctx);
1348 install_var_local (decl, ctx);
1349 break;
1351 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1352 && OMP_CLAUSE_REDUCTION_TASK (c))
1354 install_var_local (decl, ctx);
1355 break;
1357 goto do_private;
1359 case OMP_CLAUSE_LASTPRIVATE:
1360 /* Let the corresponding firstprivate clause create
1361 the variable. */
1362 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1363 break;
1364 /* FALLTHRU */
1366 case OMP_CLAUSE_FIRSTPRIVATE:
1367 case OMP_CLAUSE_LINEAR:
1368 decl = OMP_CLAUSE_DECL (c);
1369 do_private:
1370 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1371 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1372 && is_gimple_omp_offloaded (ctx->stmt))
1374 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1376 by_ref = !omp_privatize_by_reference (decl);
1377 install_var_field (decl, by_ref, 3, ctx);
1379 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1380 install_var_field (decl, true, 3, ctx);
1381 else
1382 install_var_field (decl, false, 3, ctx);
1384 if (is_variable_sized (decl))
1386 if (is_task_ctx (ctx))
1388 if (ctx->allocate_map
1389 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1391 /* For now. */
1392 if (ctx->allocate_map->get (decl))
1393 ctx->allocate_map->remove (decl);
1395 install_var_field (decl, false, 1, ctx);
1397 break;
1399 else if (is_taskreg_ctx (ctx))
1401 bool global
1402 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1403 by_ref = use_pointer_for_field (decl, NULL);
1405 if (is_task_ctx (ctx)
1406 && (global || by_ref || omp_privatize_by_reference (decl)))
1408 if (ctx->allocate_map
1409 && ctx->allocate_map->get (decl))
1410 install_var_field (decl, by_ref, 32 | 1, ctx);
1411 else
1412 install_var_field (decl, false, 1, ctx);
1413 if (!global)
1414 install_var_field (decl, by_ref, 2, ctx);
1416 else if (!global)
1417 install_var_field (decl, by_ref, 3, ctx);
1419 install_var_local (decl, ctx);
1420 break;
1422 case OMP_CLAUSE_USE_DEVICE_PTR:
1423 case OMP_CLAUSE_USE_DEVICE_ADDR:
1424 decl = OMP_CLAUSE_DECL (c);
1426 /* Fortran array descriptors. */
1427 if (lang_hooks.decls.omp_array_data (decl, true))
1428 install_var_field (decl, false, 19, ctx);
1429 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1430 && !omp_privatize_by_reference (decl)
1431 && !omp_is_allocatable_or_ptr (decl))
1432 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1433 install_var_field (decl, true, 11, ctx);
1434 else
1435 install_var_field (decl, false, 11, ctx);
1436 if (DECL_SIZE (decl)
1437 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1439 tree decl2 = DECL_VALUE_EXPR (decl);
1440 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1441 decl2 = TREE_OPERAND (decl2, 0);
1442 gcc_assert (DECL_P (decl2));
1443 install_var_local (decl2, ctx);
1445 install_var_local (decl, ctx);
1446 break;
1448 case OMP_CLAUSE_IS_DEVICE_PTR:
1449 decl = OMP_CLAUSE_DECL (c);
1450 goto do_private;
1452 case OMP_CLAUSE__LOOPTEMP_:
1453 case OMP_CLAUSE__REDUCTEMP_:
1454 gcc_assert (is_taskreg_ctx (ctx));
1455 decl = OMP_CLAUSE_DECL (c);
1456 install_var_field (decl, false, 3, ctx);
1457 install_var_local (decl, ctx);
1458 break;
1460 case OMP_CLAUSE_COPYPRIVATE:
1461 case OMP_CLAUSE_COPYIN:
1462 decl = OMP_CLAUSE_DECL (c);
1463 by_ref = use_pointer_for_field (decl, NULL);
1464 install_var_field (decl, by_ref, 3, ctx);
1465 break;
1467 case OMP_CLAUSE_FINAL:
1468 case OMP_CLAUSE_IF:
1469 case OMP_CLAUSE_NUM_THREADS:
1470 case OMP_CLAUSE_NUM_TEAMS:
1471 case OMP_CLAUSE_THREAD_LIMIT:
1472 case OMP_CLAUSE_DEVICE:
1473 case OMP_CLAUSE_SCHEDULE:
1474 case OMP_CLAUSE_DIST_SCHEDULE:
1475 case OMP_CLAUSE_DEPEND:
1476 case OMP_CLAUSE_PRIORITY:
1477 case OMP_CLAUSE_GRAINSIZE:
1478 case OMP_CLAUSE_NUM_TASKS:
1479 case OMP_CLAUSE_NUM_GANGS:
1480 case OMP_CLAUSE_NUM_WORKERS:
1481 case OMP_CLAUSE_VECTOR_LENGTH:
1482 case OMP_CLAUSE_DETACH:
1483 case OMP_CLAUSE_FILTER:
1484 if (ctx->outer)
1485 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1486 break;
1488 case OMP_CLAUSE_TO:
1489 case OMP_CLAUSE_FROM:
1490 case OMP_CLAUSE_MAP:
1491 if (ctx->outer)
1492 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1493 decl = OMP_CLAUSE_DECL (c);
1494 /* Global variables with "omp declare target" attribute
1495 don't need to be copied, the receiver side will use them
1496 directly. However, global variables with "omp declare target link"
1497 attribute need to be copied. Or when ALWAYS modifier is used. */
1498 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1499 && DECL_P (decl)
1500 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1501 && (OMP_CLAUSE_MAP_KIND (c)
1502 != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
1503 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
1504 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
1505 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1506 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1507 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1508 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1509 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
1510 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1511 && varpool_node::get_create (decl)->offloadable
1512 && !lookup_attribute ("omp declare target link",
1513 DECL_ATTRIBUTES (decl)))
1514 break;
1515 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1516 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1518 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1519 not offloaded; there is nothing to map for those. */
1520 if (!is_gimple_omp_offloaded (ctx->stmt)
1521 && !POINTER_TYPE_P (TREE_TYPE (decl))
1522 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1523 break;
1525 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1526 && DECL_P (decl)
1527 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1528 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1529 && is_omp_target (ctx->stmt))
1531 /* If this is an offloaded region, an attach operation should
1532 only exist when the pointer variable is mapped in a prior
1533 clause. */
1534 if (is_gimple_omp_offloaded (ctx->stmt))
1535 gcc_assert
1536 (maybe_lookup_decl (decl, ctx)
1537 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1538 && lookup_attribute ("omp declare target",
1539 DECL_ATTRIBUTES (decl))));
1541 /* By itself, attach/detach is generated as part of pointer
1542 variable mapping and should not create new variables in the
1543 offloaded region, however sender refs for it must be created
1544 for its address to be passed to the runtime. */
1545 tree field
1546 = build_decl (OMP_CLAUSE_LOCATION (c),
1547 FIELD_DECL, NULL_TREE, ptr_type_node);
1548 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1549 insert_field_into_struct (ctx->record_type, field);
1550 /* To not clash with a map of the pointer variable itself,
1551 attach/detach maps have their field looked up by the *clause*
1552 tree expression, not the decl. */
1553 gcc_assert (!splay_tree_lookup (ctx->field_map,
1554 (splay_tree_key) c));
1555 splay_tree_insert (ctx->field_map, (splay_tree_key) c,
1556 (splay_tree_value) field);
1557 break;
1559 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1560 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1561 || (OMP_CLAUSE_MAP_KIND (c)
1562 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1564 if (TREE_CODE (decl) == COMPONENT_REF
1565 || (TREE_CODE (decl) == INDIRECT_REF
1566 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1567 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1568 == REFERENCE_TYPE)))
1569 break;
1570 if (DECL_SIZE (decl)
1571 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1573 tree decl2 = DECL_VALUE_EXPR (decl);
1574 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1575 decl2 = TREE_OPERAND (decl2, 0);
1576 gcc_assert (DECL_P (decl2));
1577 install_var_local (decl2, ctx);
1579 install_var_local (decl, ctx);
1580 break;
1582 if (DECL_P (decl))
1584 if (DECL_SIZE (decl)
1585 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1587 tree decl2 = DECL_VALUE_EXPR (decl);
1588 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1589 decl2 = TREE_OPERAND (decl2, 0);
1590 gcc_assert (DECL_P (decl2));
1591 install_var_field (decl2, true, 3, ctx);
1592 install_var_local (decl2, ctx);
1593 install_var_local (decl, ctx);
1595 else
1597 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1598 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1599 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1600 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1601 install_var_field (decl, true, 7, ctx);
1602 else
1603 install_var_field (decl, true, 3, ctx);
1604 if (is_gimple_omp_offloaded (ctx->stmt)
1605 && !(is_gimple_omp_oacc (ctx->stmt)
1606 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
1607 install_var_local (decl, ctx);
1610 else
1612 tree base = get_base_address (decl);
1613 tree nc = OMP_CLAUSE_CHAIN (c);
1614 if (DECL_P (base)
1615 && nc != NULL_TREE
1616 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1617 && OMP_CLAUSE_DECL (nc) == base
1618 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1619 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1621 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1622 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1624 else
1626 if (ctx->outer)
1628 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1629 decl = OMP_CLAUSE_DECL (c);
1631 gcc_assert (!splay_tree_lookup (ctx->field_map,
1632 (splay_tree_key) decl));
1633 tree field
1634 = build_decl (OMP_CLAUSE_LOCATION (c),
1635 FIELD_DECL, NULL_TREE, ptr_type_node);
1636 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1637 insert_field_into_struct (ctx->record_type, field);
1638 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1639 (splay_tree_value) field);
1642 break;
1644 case OMP_CLAUSE_ORDER:
1645 ctx->order_concurrent = true;
1646 break;
1648 case OMP_CLAUSE_BIND:
1649 ctx->loop_p = true;
1650 break;
1652 case OMP_CLAUSE_NOWAIT:
1653 case OMP_CLAUSE_ORDERED:
1654 case OMP_CLAUSE_COLLAPSE:
1655 case OMP_CLAUSE_UNTIED:
1656 case OMP_CLAUSE_MERGEABLE:
1657 case OMP_CLAUSE_PROC_BIND:
1658 case OMP_CLAUSE_SAFELEN:
1659 case OMP_CLAUSE_SIMDLEN:
1660 case OMP_CLAUSE_THREADS:
1661 case OMP_CLAUSE_SIMD:
1662 case OMP_CLAUSE_NOGROUP:
1663 case OMP_CLAUSE_DEFAULTMAP:
1664 case OMP_CLAUSE_ASYNC:
1665 case OMP_CLAUSE_WAIT:
1666 case OMP_CLAUSE_GANG:
1667 case OMP_CLAUSE_WORKER:
1668 case OMP_CLAUSE_VECTOR:
1669 case OMP_CLAUSE_INDEPENDENT:
1670 case OMP_CLAUSE_AUTO:
1671 case OMP_CLAUSE_SEQ:
1672 case OMP_CLAUSE_TILE:
1673 case OMP_CLAUSE__SIMT_:
1674 case OMP_CLAUSE_DEFAULT:
1675 case OMP_CLAUSE_NONTEMPORAL:
1676 case OMP_CLAUSE_IF_PRESENT:
1677 case OMP_CLAUSE_FINALIZE:
1678 case OMP_CLAUSE_TASK_REDUCTION:
1679 case OMP_CLAUSE_ALLOCATE:
1680 break;
1682 case OMP_CLAUSE_ALIGNED:
1683 decl = OMP_CLAUSE_DECL (c);
1684 if (is_global_var (decl)
1685 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1686 install_var_local (decl, ctx);
1687 break;
1689 case OMP_CLAUSE__CONDTEMP_:
1690 decl = OMP_CLAUSE_DECL (c);
1691 if (is_parallel_ctx (ctx))
1693 install_var_field (decl, false, 3, ctx);
1694 install_var_local (decl, ctx);
1696 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1697 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1698 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1699 install_var_local (decl, ctx);
1700 break;
1702 case OMP_CLAUSE__CACHE_:
1703 case OMP_CLAUSE_NOHOST:
1704 default:
1705 gcc_unreachable ();
1709 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1711 switch (OMP_CLAUSE_CODE (c))
1713 case OMP_CLAUSE_LASTPRIVATE:
1714 /* Let the corresponding firstprivate clause create
1715 the variable. */
1716 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1717 scan_array_reductions = true;
1718 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1719 break;
1720 /* FALLTHRU */
1722 case OMP_CLAUSE_FIRSTPRIVATE:
1723 case OMP_CLAUSE_PRIVATE:
1724 case OMP_CLAUSE_LINEAR:
1725 case OMP_CLAUSE_IS_DEVICE_PTR:
1726 decl = OMP_CLAUSE_DECL (c);
1727 if (is_variable_sized (decl))
1729 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1730 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1731 && is_gimple_omp_offloaded (ctx->stmt))
1733 tree decl2 = DECL_VALUE_EXPR (decl);
1734 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1735 decl2 = TREE_OPERAND (decl2, 0);
1736 gcc_assert (DECL_P (decl2));
1737 install_var_local (decl2, ctx);
1738 fixup_remapped_decl (decl2, ctx, false);
1740 install_var_local (decl, ctx);
1742 fixup_remapped_decl (decl, ctx,
1743 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1744 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1745 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1746 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1747 scan_array_reductions = true;
1748 break;
1750 case OMP_CLAUSE_REDUCTION:
1751 case OMP_CLAUSE_IN_REDUCTION:
1752 decl = OMP_CLAUSE_DECL (c);
1753 if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
1755 if (is_variable_sized (decl))
1756 install_var_local (decl, ctx);
1757 fixup_remapped_decl (decl, ctx, false);
1759 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1760 scan_array_reductions = true;
1761 break;
1763 case OMP_CLAUSE_TASK_REDUCTION:
1764 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1765 scan_array_reductions = true;
1766 break;
1768 case OMP_CLAUSE_SHARED:
1769 /* Ignore shared directives in teams construct inside of
1770 target construct. */
1771 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1772 && !is_host_teams_ctx (ctx))
1773 break;
1774 decl = OMP_CLAUSE_DECL (c);
1775 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1776 break;
1777 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1779 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1780 ctx->outer)))
1781 break;
1782 bool by_ref = use_pointer_for_field (decl, ctx);
1783 install_var_field (decl, by_ref, 11, ctx);
1784 break;
1786 fixup_remapped_decl (decl, ctx, false);
1787 break;
1789 case OMP_CLAUSE_MAP:
1790 if (!is_gimple_omp_offloaded (ctx->stmt))
1791 break;
1792 decl = OMP_CLAUSE_DECL (c);
1793 if (DECL_P (decl)
1794 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1795 && (OMP_CLAUSE_MAP_KIND (c)
1796 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1797 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1798 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1799 && varpool_node::get_create (decl)->offloadable)
1800 break;
1801 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1802 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1803 && is_omp_target (ctx->stmt)
1804 && !is_gimple_omp_offloaded (ctx->stmt))
1805 break;
1806 if (DECL_P (decl))
1808 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1809 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1810 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1811 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1813 tree new_decl = lookup_decl (decl, ctx);
1814 TREE_TYPE (new_decl)
1815 = remap_type (TREE_TYPE (decl), &ctx->cb);
1817 else if (DECL_SIZE (decl)
1818 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1820 tree decl2 = DECL_VALUE_EXPR (decl);
1821 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1822 decl2 = TREE_OPERAND (decl2, 0);
1823 gcc_assert (DECL_P (decl2));
1824 fixup_remapped_decl (decl2, ctx, false);
1825 fixup_remapped_decl (decl, ctx, true);
1827 else
1828 fixup_remapped_decl (decl, ctx, false);
1830 break;
1832 case OMP_CLAUSE_COPYPRIVATE:
1833 case OMP_CLAUSE_COPYIN:
1834 case OMP_CLAUSE_DEFAULT:
1835 case OMP_CLAUSE_IF:
1836 case OMP_CLAUSE_NUM_THREADS:
1837 case OMP_CLAUSE_NUM_TEAMS:
1838 case OMP_CLAUSE_THREAD_LIMIT:
1839 case OMP_CLAUSE_DEVICE:
1840 case OMP_CLAUSE_SCHEDULE:
1841 case OMP_CLAUSE_DIST_SCHEDULE:
1842 case OMP_CLAUSE_NOWAIT:
1843 case OMP_CLAUSE_ORDERED:
1844 case OMP_CLAUSE_COLLAPSE:
1845 case OMP_CLAUSE_UNTIED:
1846 case OMP_CLAUSE_FINAL:
1847 case OMP_CLAUSE_MERGEABLE:
1848 case OMP_CLAUSE_PROC_BIND:
1849 case OMP_CLAUSE_SAFELEN:
1850 case OMP_CLAUSE_SIMDLEN:
1851 case OMP_CLAUSE_ALIGNED:
1852 case OMP_CLAUSE_DEPEND:
1853 case OMP_CLAUSE_DETACH:
1854 case OMP_CLAUSE_ALLOCATE:
1855 case OMP_CLAUSE__LOOPTEMP_:
1856 case OMP_CLAUSE__REDUCTEMP_:
1857 case OMP_CLAUSE_TO:
1858 case OMP_CLAUSE_FROM:
1859 case OMP_CLAUSE_PRIORITY:
1860 case OMP_CLAUSE_GRAINSIZE:
1861 case OMP_CLAUSE_NUM_TASKS:
1862 case OMP_CLAUSE_THREADS:
1863 case OMP_CLAUSE_SIMD:
1864 case OMP_CLAUSE_NOGROUP:
1865 case OMP_CLAUSE_DEFAULTMAP:
1866 case OMP_CLAUSE_ORDER:
1867 case OMP_CLAUSE_BIND:
1868 case OMP_CLAUSE_USE_DEVICE_PTR:
1869 case OMP_CLAUSE_USE_DEVICE_ADDR:
1870 case OMP_CLAUSE_NONTEMPORAL:
1871 case OMP_CLAUSE_ASYNC:
1872 case OMP_CLAUSE_WAIT:
1873 case OMP_CLAUSE_NUM_GANGS:
1874 case OMP_CLAUSE_NUM_WORKERS:
1875 case OMP_CLAUSE_VECTOR_LENGTH:
1876 case OMP_CLAUSE_GANG:
1877 case OMP_CLAUSE_WORKER:
1878 case OMP_CLAUSE_VECTOR:
1879 case OMP_CLAUSE_INDEPENDENT:
1880 case OMP_CLAUSE_AUTO:
1881 case OMP_CLAUSE_SEQ:
1882 case OMP_CLAUSE_TILE:
1883 case OMP_CLAUSE__SIMT_:
1884 case OMP_CLAUSE_IF_PRESENT:
1885 case OMP_CLAUSE_FINALIZE:
1886 case OMP_CLAUSE_FILTER:
1887 case OMP_CLAUSE__CONDTEMP_:
1888 break;
1890 case OMP_CLAUSE__CACHE_:
1891 case OMP_CLAUSE_NOHOST:
1892 default:
1893 gcc_unreachable ();
1897 gcc_checking_assert (!scan_array_reductions
1898 || !is_gimple_omp_oacc (ctx->stmt));
1899 if (scan_array_reductions)
1901 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1902 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1903 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1904 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1905 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1907 omp_context *rctx = ctx;
1908 if (is_omp_target (ctx->stmt))
1909 rctx = ctx->outer;
1910 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
1911 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
1913 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1914 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1915 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1916 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1917 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1918 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1922 /* Create a new name for omp child function. Returns an identifier. */
1924 static tree
1925 create_omp_child_function_name (bool task_copy)
1927 return clone_function_name_numbered (current_function_decl,
1928 task_copy ? "_omp_cpyfn" : "_omp_fn");
1931 /* Return true if CTX may belong to offloaded code: either if current function
1932 is offloaded, or any enclosing context corresponds to a target region. */
1934 static bool
1935 omp_maybe_offloaded_ctx (omp_context *ctx)
1937 if (cgraph_node::get (current_function_decl)->offloadable)
1938 return true;
1939 for (; ctx; ctx = ctx->outer)
1940 if (is_gimple_omp_offloaded (ctx->stmt))
1941 return true;
1942 return false;
1945 /* Build a decl for the omp child function. It'll not contain a body
1946 yet, just the bare decl. */
1948 static void
1949 create_omp_child_function (omp_context *ctx, bool task_copy)
1951 tree decl, type, name, t;
1953 name = create_omp_child_function_name (task_copy);
1954 if (task_copy)
1955 type = build_function_type_list (void_type_node, ptr_type_node,
1956 ptr_type_node, NULL_TREE);
1957 else
1958 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1960 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1962 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1963 || !task_copy);
1964 if (!task_copy)
1965 ctx->cb.dst_fn = decl;
1966 else
1967 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1969 TREE_STATIC (decl) = 1;
1970 TREE_USED (decl) = 1;
1971 DECL_ARTIFICIAL (decl) = 1;
1972 DECL_IGNORED_P (decl) = 0;
1973 TREE_PUBLIC (decl) = 0;
1974 DECL_UNINLINABLE (decl) = 1;
1975 DECL_EXTERNAL (decl) = 0;
1976 DECL_CONTEXT (decl) = NULL_TREE;
1977 DECL_INITIAL (decl) = make_node (BLOCK);
1978 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1979 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1980 /* Remove omp declare simd attribute from the new attributes. */
1981 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1983 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1984 a = a2;
1985 a = TREE_CHAIN (a);
1986 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1987 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1988 *p = TREE_CHAIN (*p);
1989 else
1991 tree chain = TREE_CHAIN (*p);
1992 *p = copy_node (*p);
1993 p = &TREE_CHAIN (*p);
1994 *p = chain;
1997 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1998 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1999 DECL_FUNCTION_SPECIFIC_TARGET (decl)
2000 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
2001 DECL_FUNCTION_VERSIONED (decl)
2002 = DECL_FUNCTION_VERSIONED (current_function_decl);
2004 if (omp_maybe_offloaded_ctx (ctx))
2006 cgraph_node::get_create (decl)->offloadable = 1;
2007 if (ENABLE_OFFLOADING)
2008 g->have_offload = true;
2011 if (cgraph_node::get_create (decl)->offloadable)
2013 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
2014 ? "omp target entrypoint"
2015 : "omp declare target");
2016 if (lookup_attribute ("omp declare target",
2017 DECL_ATTRIBUTES (current_function_decl)))
2019 if (is_gimple_omp_offloaded (ctx->stmt))
2020 DECL_ATTRIBUTES (decl)
2021 = remove_attribute ("omp declare target",
2022 copy_list (DECL_ATTRIBUTES (decl)));
2023 else
2024 target_attr = NULL;
2026 if (target_attr)
2027 DECL_ATTRIBUTES (decl)
2028 = tree_cons (get_identifier (target_attr),
2029 NULL_TREE, DECL_ATTRIBUTES (decl));
2032 t = build_decl (DECL_SOURCE_LOCATION (decl),
2033 RESULT_DECL, NULL_TREE, void_type_node);
2034 DECL_ARTIFICIAL (t) = 1;
2035 DECL_IGNORED_P (t) = 1;
2036 DECL_CONTEXT (t) = decl;
2037 DECL_RESULT (decl) = t;
2039 tree data_name = get_identifier (".omp_data_i");
2040 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
2041 ptr_type_node);
2042 DECL_ARTIFICIAL (t) = 1;
2043 DECL_NAMELESS (t) = 1;
2044 DECL_ARG_TYPE (t) = ptr_type_node;
2045 DECL_CONTEXT (t) = current_function_decl;
2046 TREE_USED (t) = 1;
2047 TREE_READONLY (t) = 1;
2048 DECL_ARGUMENTS (decl) = t;
2049 if (!task_copy)
2050 ctx->receiver_decl = t;
2051 else
2053 t = build_decl (DECL_SOURCE_LOCATION (decl),
2054 PARM_DECL, get_identifier (".omp_data_o"),
2055 ptr_type_node);
2056 DECL_ARTIFICIAL (t) = 1;
2057 DECL_NAMELESS (t) = 1;
2058 DECL_ARG_TYPE (t) = ptr_type_node;
2059 DECL_CONTEXT (t) = current_function_decl;
2060 TREE_USED (t) = 1;
2061 TREE_ADDRESSABLE (t) = 1;
2062 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2063 DECL_ARGUMENTS (decl) = t;
2066 /* Allocate memory for the function structure. The call to
2067 allocate_struct_function clobbers CFUN, so we need to restore
2068 it afterward. */
2069 push_struct_function (decl);
2070 cfun->function_end_locus = gimple_location (ctx->stmt);
2071 init_tree_ssa (cfun);
2072 pop_cfun ();
2075 /* Callback for walk_gimple_seq. Check if combined parallel
2076 contains gimple_omp_for_combined_into_p OMP_FOR. */
2078 tree
2079 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2080 bool *handled_ops_p,
2081 struct walk_stmt_info *wi)
2083 gimple *stmt = gsi_stmt (*gsi_p);
2085 *handled_ops_p = true;
2086 switch (gimple_code (stmt))
2088 WALK_SUBSTMTS;
2090 case GIMPLE_OMP_FOR:
2091 if (gimple_omp_for_combined_into_p (stmt)
2092 && gimple_omp_for_kind (stmt)
2093 == *(const enum gf_mask *) (wi->info))
2095 wi->info = stmt;
2096 return integer_zero_node;
2098 break;
2099 default:
2100 break;
2102 return NULL;
2105 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2107 static void
2108 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2109 omp_context *outer_ctx)
2111 struct walk_stmt_info wi;
2113 memset (&wi, 0, sizeof (wi));
2114 wi.val_only = true;
2115 wi.info = (void *) &msk;
2116 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2117 if (wi.info != (void *) &msk)
2119 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2120 struct omp_for_data fd;
2121 omp_extract_for_data (for_stmt, &fd, NULL);
2122 /* We need two temporaries with fd.loop.v type (istart/iend)
2123 and then (fd.collapse - 1) temporaries with the same
2124 type for count2 ... countN-1 vars if not constant. */
2125 size_t count = 2, i;
2126 tree type = fd.iter_type;
2127 if (fd.collapse > 1
2128 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2130 count += fd.collapse - 1;
2131 /* If there are lastprivate clauses on the inner
2132 GIMPLE_OMP_FOR, add one more temporaries for the total number
2133 of iterations (product of count1 ... countN-1). */
2134 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2135 OMP_CLAUSE_LASTPRIVATE)
2136 || (msk == GF_OMP_FOR_KIND_FOR
2137 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2138 OMP_CLAUSE_LASTPRIVATE)))
2140 tree temp = create_tmp_var (type);
2141 tree c = build_omp_clause (UNKNOWN_LOCATION,
2142 OMP_CLAUSE__LOOPTEMP_);
2143 insert_decl_map (&outer_ctx->cb, temp, temp);
2144 OMP_CLAUSE_DECL (c) = temp;
2145 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2146 gimple_omp_taskreg_set_clauses (stmt, c);
2148 if (fd.non_rect
2149 && fd.last_nonrect == fd.first_nonrect + 1)
2150 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2151 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2153 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2154 tree type2 = TREE_TYPE (v);
2155 count++;
2156 for (i = 0; i < 3; i++)
2158 tree temp = create_tmp_var (type2);
2159 tree c = build_omp_clause (UNKNOWN_LOCATION,
2160 OMP_CLAUSE__LOOPTEMP_);
2161 insert_decl_map (&outer_ctx->cb, temp, temp);
2162 OMP_CLAUSE_DECL (c) = temp;
2163 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2164 gimple_omp_taskreg_set_clauses (stmt, c);
2168 for (i = 0; i < count; i++)
2170 tree temp = create_tmp_var (type);
2171 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2172 insert_decl_map (&outer_ctx->cb, temp, temp);
2173 OMP_CLAUSE_DECL (c) = temp;
2174 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2175 gimple_omp_taskreg_set_clauses (stmt, c);
2178 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2179 && omp_find_clause (gimple_omp_task_clauses (stmt),
2180 OMP_CLAUSE_REDUCTION))
2182 tree type = build_pointer_type (pointer_sized_int_node);
2183 tree temp = create_tmp_var (type);
2184 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2185 insert_decl_map (&outer_ctx->cb, temp, temp);
2186 OMP_CLAUSE_DECL (c) = temp;
2187 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2188 gimple_omp_task_set_clauses (stmt, c);
2192 /* Scan an OpenMP parallel directive. */
2194 static void
2195 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2197 omp_context *ctx;
2198 tree name;
2199 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2201 /* Ignore parallel directives with empty bodies, unless there
2202 are copyin clauses. */
2203 if (optimize > 0
2204 && empty_body_p (gimple_omp_body (stmt))
2205 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2206 OMP_CLAUSE_COPYIN) == NULL)
2208 gsi_replace (gsi, gimple_build_nop (), false);
2209 return;
2212 if (gimple_omp_parallel_combined_p (stmt))
2213 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
2214 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2215 OMP_CLAUSE_REDUCTION);
2216 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2217 if (OMP_CLAUSE_REDUCTION_TASK (c))
2219 tree type = build_pointer_type (pointer_sized_int_node);
2220 tree temp = create_tmp_var (type);
2221 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2222 if (outer_ctx)
2223 insert_decl_map (&outer_ctx->cb, temp, temp);
2224 OMP_CLAUSE_DECL (c) = temp;
2225 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2226 gimple_omp_parallel_set_clauses (stmt, c);
2227 break;
2229 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2230 break;
2232 ctx = new_omp_context (stmt, outer_ctx);
2233 taskreg_contexts.safe_push (ctx);
2234 if (taskreg_nesting_level > 1)
2235 ctx->is_nested = true;
2236 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2237 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2238 name = create_tmp_var_name (".omp_data_s");
2239 name = build_decl (gimple_location (stmt),
2240 TYPE_DECL, name, ctx->record_type);
2241 DECL_ARTIFICIAL (name) = 1;
2242 DECL_NAMELESS (name) = 1;
2243 TYPE_NAME (ctx->record_type) = name;
2244 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2245 create_omp_child_function (ctx, false);
2246 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2248 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2249 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2251 if (TYPE_FIELDS (ctx->record_type) == NULL)
2252 ctx->record_type = ctx->receiver_decl = NULL;
2255 /* Scan an OpenMP task directive. */
2257 static void
2258 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2260 omp_context *ctx;
2261 tree name, t;
2262 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2264 /* Ignore task directives with empty bodies, unless they have depend
2265 clause. */
2266 if (optimize > 0
2267 && gimple_omp_body (stmt)
2268 && empty_body_p (gimple_omp_body (stmt))
2269 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2271 gsi_replace (gsi, gimple_build_nop (), false);
2272 return;
2275 if (gimple_omp_task_taskloop_p (stmt))
2276 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2278 ctx = new_omp_context (stmt, outer_ctx);
2280 if (gimple_omp_task_taskwait_p (stmt))
2282 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2283 return;
2286 taskreg_contexts.safe_push (ctx);
2287 if (taskreg_nesting_level > 1)
2288 ctx->is_nested = true;
2289 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2290 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2291 name = create_tmp_var_name (".omp_data_s");
2292 name = build_decl (gimple_location (stmt),
2293 TYPE_DECL, name, ctx->record_type);
2294 DECL_ARTIFICIAL (name) = 1;
2295 DECL_NAMELESS (name) = 1;
2296 TYPE_NAME (ctx->record_type) = name;
2297 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2298 create_omp_child_function (ctx, false);
2299 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2301 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2303 if (ctx->srecord_type)
2305 name = create_tmp_var_name (".omp_data_a");
2306 name = build_decl (gimple_location (stmt),
2307 TYPE_DECL, name, ctx->srecord_type);
2308 DECL_ARTIFICIAL (name) = 1;
2309 DECL_NAMELESS (name) = 1;
2310 TYPE_NAME (ctx->srecord_type) = name;
2311 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2312 create_omp_child_function (ctx, true);
2315 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2317 if (TYPE_FIELDS (ctx->record_type) == NULL)
2319 ctx->record_type = ctx->receiver_decl = NULL;
2320 t = build_int_cst (long_integer_type_node, 0);
2321 gimple_omp_task_set_arg_size (stmt, t);
2322 t = build_int_cst (long_integer_type_node, 1);
2323 gimple_omp_task_set_arg_align (stmt, t);
2327 /* Helper function for finish_taskreg_scan, called through walk_tree.
2328 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2329 tree, replace it in the expression. */
2331 static tree
2332 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2334 if (VAR_P (*tp))
2336 omp_context *ctx = (omp_context *) data;
2337 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2338 if (t != *tp)
2340 if (DECL_HAS_VALUE_EXPR_P (t))
2341 t = unshare_expr (DECL_VALUE_EXPR (t));
2342 *tp = t;
2344 *walk_subtrees = 0;
2346 else if (IS_TYPE_OR_DECL_P (*tp))
2347 *walk_subtrees = 0;
2348 return NULL_TREE;
2351 /* If any decls have been made addressable during scan_omp,
2352 adjust their fields if needed, and layout record types
2353 of parallel/task constructs. */
2355 static void
2356 finish_taskreg_scan (omp_context *ctx)
2358 if (ctx->record_type == NULL_TREE)
2359 return;
2361 /* If any task_shared_vars were needed, verify all
2362 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2363 statements if use_pointer_for_field hasn't changed
2364 because of that. If it did, update field types now. */
2365 if (task_shared_vars)
2367 tree c;
2369 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2370 c; c = OMP_CLAUSE_CHAIN (c))
2371 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2372 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2374 tree decl = OMP_CLAUSE_DECL (c);
2376 /* Global variables don't need to be copied,
2377 the receiver side will use them directly. */
2378 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2379 continue;
2380 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2381 || !use_pointer_for_field (decl, ctx))
2382 continue;
2383 tree field = lookup_field (decl, ctx);
2384 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2385 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2386 continue;
2387 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2388 TREE_THIS_VOLATILE (field) = 0;
2389 DECL_USER_ALIGN (field) = 0;
2390 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2391 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2392 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2393 if (ctx->srecord_type)
2395 tree sfield = lookup_sfield (decl, ctx);
2396 TREE_TYPE (sfield) = TREE_TYPE (field);
2397 TREE_THIS_VOLATILE (sfield) = 0;
2398 DECL_USER_ALIGN (sfield) = 0;
2399 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2400 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2401 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2406 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2408 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2409 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2410 if (c)
2412 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2413 expects to find it at the start of data. */
2414 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2415 tree *p = &TYPE_FIELDS (ctx->record_type);
2416 while (*p)
2417 if (*p == f)
2419 *p = DECL_CHAIN (*p);
2420 break;
2422 else
2423 p = &DECL_CHAIN (*p);
2424 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2425 TYPE_FIELDS (ctx->record_type) = f;
2427 layout_type (ctx->record_type);
2428 fixup_child_record_type (ctx);
2430 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2432 layout_type (ctx->record_type);
2433 fixup_child_record_type (ctx);
2435 else
2437 location_t loc = gimple_location (ctx->stmt);
2438 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2439 tree detach_clause
2440 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2441 OMP_CLAUSE_DETACH);
2442 /* Move VLA fields to the end. */
2443 p = &TYPE_FIELDS (ctx->record_type);
2444 while (*p)
2445 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2446 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2448 *q = *p;
2449 *p = TREE_CHAIN (*p);
2450 TREE_CHAIN (*q) = NULL_TREE;
2451 q = &TREE_CHAIN (*q);
2453 else
2454 p = &DECL_CHAIN (*p);
2455 *p = vla_fields;
2456 if (gimple_omp_task_taskloop_p (ctx->stmt))
2458 /* Move fields corresponding to first and second _looptemp_
2459 clause first. There are filled by GOMP_taskloop
2460 and thus need to be in specific positions. */
2461 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2462 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2463 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2464 OMP_CLAUSE__LOOPTEMP_);
2465 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2466 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2467 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2468 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2469 p = &TYPE_FIELDS (ctx->record_type);
2470 while (*p)
2471 if (*p == f1 || *p == f2 || *p == f3)
2472 *p = DECL_CHAIN (*p);
2473 else
2474 p = &DECL_CHAIN (*p);
2475 DECL_CHAIN (f1) = f2;
2476 if (c3)
2478 DECL_CHAIN (f2) = f3;
2479 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2481 else
2482 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2483 TYPE_FIELDS (ctx->record_type) = f1;
2484 if (ctx->srecord_type)
2486 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2487 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2488 if (c3)
2489 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2490 p = &TYPE_FIELDS (ctx->srecord_type);
2491 while (*p)
2492 if (*p == f1 || *p == f2 || *p == f3)
2493 *p = DECL_CHAIN (*p);
2494 else
2495 p = &DECL_CHAIN (*p);
2496 DECL_CHAIN (f1) = f2;
2497 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2498 if (c3)
2500 DECL_CHAIN (f2) = f3;
2501 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2503 else
2504 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2505 TYPE_FIELDS (ctx->srecord_type) = f1;
2508 if (detach_clause)
2510 tree c, field;
2512 /* Look for a firstprivate clause with the detach event handle. */
2513 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2514 c; c = OMP_CLAUSE_CHAIN (c))
2516 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2517 continue;
2518 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2519 == OMP_CLAUSE_DECL (detach_clause))
2520 break;
2523 gcc_assert (c);
2524 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2526 /* Move field corresponding to the detach clause first.
2527 This is filled by GOMP_task and needs to be in a
2528 specific position. */
2529 p = &TYPE_FIELDS (ctx->record_type);
2530 while (*p)
2531 if (*p == field)
2532 *p = DECL_CHAIN (*p);
2533 else
2534 p = &DECL_CHAIN (*p);
2535 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2536 TYPE_FIELDS (ctx->record_type) = field;
2537 if (ctx->srecord_type)
2539 field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2540 p = &TYPE_FIELDS (ctx->srecord_type);
2541 while (*p)
2542 if (*p == field)
2543 *p = DECL_CHAIN (*p);
2544 else
2545 p = &DECL_CHAIN (*p);
2546 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2547 TYPE_FIELDS (ctx->srecord_type) = field;
2550 layout_type (ctx->record_type);
2551 fixup_child_record_type (ctx);
2552 if (ctx->srecord_type)
2553 layout_type (ctx->srecord_type);
2554 tree t = fold_convert_loc (loc, long_integer_type_node,
2555 TYPE_SIZE_UNIT (ctx->record_type));
2556 if (TREE_CODE (t) != INTEGER_CST)
2558 t = unshare_expr (t);
2559 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2561 gimple_omp_task_set_arg_size (ctx->stmt, t);
2562 t = build_int_cst (long_integer_type_node,
2563 TYPE_ALIGN_UNIT (ctx->record_type));
2564 gimple_omp_task_set_arg_align (ctx->stmt, t);
2568 /* Find the enclosing offload context. */
2570 static omp_context *
2571 enclosing_target_ctx (omp_context *ctx)
2573 for (; ctx; ctx = ctx->outer)
2574 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2575 break;
2577 return ctx;
2580 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2581 construct.
2582 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2584 static bool
2585 ctx_in_oacc_kernels_region (omp_context *ctx)
2587 for (;ctx != NULL; ctx = ctx->outer)
2589 gimple *stmt = ctx->stmt;
2590 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2591 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2592 return true;
2595 return false;
2598 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2599 (This doesn't include OpenACC 'kernels' decomposed parts.)
2600 Until kernels handling moves to use the same loop indirection
2601 scheme as parallel, we need to do this checking early. */
2603 static unsigned
2604 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2606 bool checking = true;
2607 unsigned outer_mask = 0;
2608 unsigned this_mask = 0;
2609 bool has_seq = false, has_auto = false;
2611 if (ctx->outer)
2612 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2613 if (!stmt)
2615 checking = false;
2616 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2617 return outer_mask;
2618 stmt = as_a <gomp_for *> (ctx->stmt);
2621 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2623 switch (OMP_CLAUSE_CODE (c))
2625 case OMP_CLAUSE_GANG:
2626 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2627 break;
2628 case OMP_CLAUSE_WORKER:
2629 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2630 break;
2631 case OMP_CLAUSE_VECTOR:
2632 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2633 break;
2634 case OMP_CLAUSE_SEQ:
2635 has_seq = true;
2636 break;
2637 case OMP_CLAUSE_AUTO:
2638 has_auto = true;
2639 break;
2640 default:
2641 break;
2645 if (checking)
2647 if (has_seq && (this_mask || has_auto))
2648 error_at (gimple_location (stmt), "%<seq%> overrides other"
2649 " OpenACC loop specifiers");
2650 else if (has_auto && this_mask)
2651 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2652 " OpenACC loop specifiers");
2654 if (this_mask & outer_mask)
2655 error_at (gimple_location (stmt), "inner loop uses same"
2656 " OpenACC parallelism as containing loop");
2659 return outer_mask | this_mask;
2662 /* Scan a GIMPLE_OMP_FOR. */
2664 static omp_context *
2665 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2667 omp_context *ctx;
2668 size_t i;
2669 tree clauses = gimple_omp_for_clauses (stmt);
2671 ctx = new_omp_context (stmt, outer_ctx);
2673 if (is_gimple_omp_oacc (stmt))
2675 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2677 if (!(tgt && is_oacc_kernels (tgt)))
2678 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2680 tree c_op0;
2681 switch (OMP_CLAUSE_CODE (c))
2683 case OMP_CLAUSE_GANG:
2684 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2685 break;
2687 case OMP_CLAUSE_WORKER:
2688 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2689 break;
2691 case OMP_CLAUSE_VECTOR:
2692 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2693 break;
2695 default:
2696 continue;
2699 if (c_op0)
2701 /* By construction, this is impossible for OpenACC 'kernels'
2702 decomposed parts. */
2703 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2705 error_at (OMP_CLAUSE_LOCATION (c),
2706 "argument not permitted on %qs clause",
2707 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2708 if (tgt)
2709 inform (gimple_location (tgt->stmt),
2710 "enclosing parent compute construct");
2711 else if (oacc_get_fn_attrib (current_function_decl))
2712 inform (DECL_SOURCE_LOCATION (current_function_decl),
2713 "enclosing routine");
2714 else
2715 gcc_unreachable ();
2719 if (tgt && is_oacc_kernels (tgt))
2720 check_oacc_kernel_gwv (stmt, ctx);
2722 /* Collect all variables named in reductions on this loop. Ensure
2723 that, if this loop has a reduction on some variable v, and there is
2724 a reduction on v somewhere in an outer context, then there is a
2725 reduction on v on all intervening loops as well. */
2726 tree local_reduction_clauses = NULL;
2727 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2729 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2730 local_reduction_clauses
2731 = tree_cons (NULL, c, local_reduction_clauses);
2733 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2734 ctx->outer_reduction_clauses
2735 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2736 ctx->outer->outer_reduction_clauses);
2737 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2738 tree local_iter = local_reduction_clauses;
2739 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2741 tree local_clause = TREE_VALUE (local_iter);
2742 tree local_var = OMP_CLAUSE_DECL (local_clause);
2743 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2744 bool have_outer_reduction = false;
2745 tree ctx_iter = outer_reduction_clauses;
2746 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2748 tree outer_clause = TREE_VALUE (ctx_iter);
2749 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2750 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2751 if (outer_var == local_var && outer_op != local_op)
2753 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2754 "conflicting reduction operations for %qE",
2755 local_var);
2756 inform (OMP_CLAUSE_LOCATION (outer_clause),
2757 "location of the previous reduction for %qE",
2758 outer_var);
2760 if (outer_var == local_var)
2762 have_outer_reduction = true;
2763 break;
2766 if (have_outer_reduction)
2768 /* There is a reduction on outer_var both on this loop and on
2769 some enclosing loop. Walk up the context tree until such a
2770 loop with a reduction on outer_var is found, and complain
2771 about all intervening loops that do not have such a
2772 reduction. */
2773 struct omp_context *curr_loop = ctx->outer;
2774 bool found = false;
2775 while (curr_loop != NULL)
2777 tree curr_iter = curr_loop->local_reduction_clauses;
2778 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2780 tree curr_clause = TREE_VALUE (curr_iter);
2781 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2782 if (curr_var == local_var)
2784 found = true;
2785 break;
2788 if (!found)
2789 warning_at (gimple_location (curr_loop->stmt), 0,
2790 "nested loop in reduction needs "
2791 "reduction clause for %qE",
2792 local_var);
2793 else
2794 break;
2795 curr_loop = curr_loop->outer;
2799 ctx->local_reduction_clauses = local_reduction_clauses;
2800 ctx->outer_reduction_clauses
2801 = chainon (unshare_expr (ctx->local_reduction_clauses),
2802 ctx->outer_reduction_clauses);
2804 if (tgt && is_oacc_kernels (tgt))
2806 /* Strip out reductions, as they are not handled yet. */
2807 tree *prev_ptr = &clauses;
2809 while (tree probe = *prev_ptr)
2811 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2813 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2814 *prev_ptr = *next_ptr;
2815 else
2816 prev_ptr = next_ptr;
2819 gimple_omp_for_set_clauses (stmt, clauses);
2823 scan_sharing_clauses (clauses, ctx);
2825 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2826 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2828 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2829 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2830 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2831 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2833 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2834 return ctx;
2837 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2839 static void
2840 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2841 omp_context *outer_ctx)
2843 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2844 gsi_replace (gsi, bind, false);
2845 gimple_seq seq = NULL;
2846 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2847 tree cond = create_tmp_var_raw (integer_type_node);
2848 DECL_CONTEXT (cond) = current_function_decl;
2849 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2850 gimple_bind_set_vars (bind, cond);
2851 gimple_call_set_lhs (g, cond);
2852 gimple_seq_add_stmt (&seq, g);
2853 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2854 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2855 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2856 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2857 gimple_seq_add_stmt (&seq, g);
2858 g = gimple_build_label (lab1);
2859 gimple_seq_add_stmt (&seq, g);
2860 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2861 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2862 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2863 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2864 gimple_omp_for_set_clauses (new_stmt, clause);
2865 gimple_seq_add_stmt (&seq, new_stmt);
2866 g = gimple_build_goto (lab3);
2867 gimple_seq_add_stmt (&seq, g);
2868 g = gimple_build_label (lab2);
2869 gimple_seq_add_stmt (&seq, g);
2870 gimple_seq_add_stmt (&seq, stmt);
2871 g = gimple_build_label (lab3);
2872 gimple_seq_add_stmt (&seq, g);
2873 gimple_bind_set_body (bind, seq);
2874 update_stmt (bind);
2875 scan_omp_for (new_stmt, outer_ctx);
2876 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2879 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2880 struct walk_stmt_info *);
2881 static omp_context *maybe_lookup_ctx (gimple *);
2883 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2884 for scan phase loop. */
2886 static void
2887 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2888 omp_context *outer_ctx)
2890 /* The only change between inclusive and exclusive scan will be
2891 within the first simd loop, so just use inclusive in the
2892 worksharing loop. */
2893 outer_ctx->scan_inclusive = true;
2894 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2895 OMP_CLAUSE_DECL (c) = integer_zero_node;
2897 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2898 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2899 gsi_replace (gsi, input_stmt, false);
2900 gimple_seq input_body = NULL;
2901 gimple_seq_add_stmt (&input_body, stmt);
2902 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2904 gimple_stmt_iterator input1_gsi = gsi_none ();
2905 struct walk_stmt_info wi;
2906 memset (&wi, 0, sizeof (wi));
2907 wi.val_only = true;
2908 wi.info = (void *) &input1_gsi;
2909 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2910 gcc_assert (!gsi_end_p (input1_gsi));
2912 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2913 gsi_next (&input1_gsi);
2914 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2915 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2916 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2917 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2918 std::swap (input_stmt1, scan_stmt1);
2920 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2921 gimple_omp_set_body (input_stmt1, NULL);
2923 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2924 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2926 gimple_omp_set_body (input_stmt1, input_body1);
2927 gimple_omp_set_body (scan_stmt1, NULL);
2929 gimple_stmt_iterator input2_gsi = gsi_none ();
2930 memset (&wi, 0, sizeof (wi));
2931 wi.val_only = true;
2932 wi.info = (void *) &input2_gsi;
2933 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2934 NULL, &wi);
2935 gcc_assert (!gsi_end_p (input2_gsi));
2937 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2938 gsi_next (&input2_gsi);
2939 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2940 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2941 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2942 std::swap (input_stmt2, scan_stmt2);
2944 gimple_omp_set_body (input_stmt2, NULL);
2946 gimple_omp_set_body (input_stmt, input_body);
2947 gimple_omp_set_body (scan_stmt, scan_body);
2949 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2950 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2952 ctx = new_omp_context (scan_stmt, outer_ctx);
2953 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2955 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
2958 /* Scan an OpenMP sections directive. */
2960 static void
2961 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2963 omp_context *ctx;
2965 ctx = new_omp_context (stmt, outer_ctx);
2966 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2967 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2970 /* Scan an OpenMP single directive. */
2972 static void
2973 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2975 omp_context *ctx;
2976 tree name;
2978 ctx = new_omp_context (stmt, outer_ctx);
2979 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2980 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2981 name = create_tmp_var_name (".omp_copy_s");
2982 name = build_decl (gimple_location (stmt),
2983 TYPE_DECL, name, ctx->record_type);
2984 TYPE_NAME (ctx->record_type) = name;
2986 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2987 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2989 if (TYPE_FIELDS (ctx->record_type) == NULL)
2990 ctx->record_type = NULL;
2991 else
2992 layout_type (ctx->record_type);
2995 /* Scan a GIMPLE_OMP_TARGET. */
2997 static void
2998 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
3000 omp_context *ctx;
3001 tree name;
3002 bool offloaded = is_gimple_omp_offloaded (stmt);
3003 tree clauses = gimple_omp_target_clauses (stmt);
3005 ctx = new_omp_context (stmt, outer_ctx);
3006 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3007 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3008 name = create_tmp_var_name (".omp_data_t");
3009 name = build_decl (gimple_location (stmt),
3010 TYPE_DECL, name, ctx->record_type);
3011 DECL_ARTIFICIAL (name) = 1;
3012 DECL_NAMELESS (name) = 1;
3013 TYPE_NAME (ctx->record_type) = name;
3014 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3016 if (offloaded)
3018 create_omp_child_function (ctx, false);
3019 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
3022 scan_sharing_clauses (clauses, ctx);
3023 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3025 if (TYPE_FIELDS (ctx->record_type) == NULL)
3026 ctx->record_type = ctx->receiver_decl = NULL;
3027 else
3029 TYPE_FIELDS (ctx->record_type)
3030 = nreverse (TYPE_FIELDS (ctx->record_type));
3031 if (flag_checking)
3033 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
3034 for (tree field = TYPE_FIELDS (ctx->record_type);
3035 field;
3036 field = DECL_CHAIN (field))
3037 gcc_assert (DECL_ALIGN (field) == align);
3039 layout_type (ctx->record_type);
3040 if (offloaded)
3041 fixup_child_record_type (ctx);
3044 if (ctx->teams_nested_p && ctx->nonteams_nested_p)
3046 error_at (gimple_location (stmt),
3047 "%<target%> construct with nested %<teams%> construct "
3048 "contains directives outside of the %<teams%> construct");
3049 gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
3053 /* Scan an OpenMP teams directive. */
3055 static void
3056 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
3058 omp_context *ctx = new_omp_context (stmt, outer_ctx);
3060 if (!gimple_omp_teams_host (stmt))
3062 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3063 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3064 return;
3066 taskreg_contexts.safe_push (ctx);
3067 gcc_assert (taskreg_nesting_level == 1);
3068 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3069 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3070 tree name = create_tmp_var_name (".omp_data_s");
3071 name = build_decl (gimple_location (stmt),
3072 TYPE_DECL, name, ctx->record_type);
3073 DECL_ARTIFICIAL (name) = 1;
3074 DECL_NAMELESS (name) = 1;
3075 TYPE_NAME (ctx->record_type) = name;
3076 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3077 create_omp_child_function (ctx, false);
3078 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
3080 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3081 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3083 if (TYPE_FIELDS (ctx->record_type) == NULL)
3084 ctx->record_type = ctx->receiver_decl = NULL;
3087 /* Check nesting restrictions. */
3088 static bool
3089 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
3091 tree c;
3093 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3094 inside an OpenACC CTX. */
3095 if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3096 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
3097 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3099 else if (!(is_gimple_omp (stmt)
3100 && is_gimple_omp_oacc (stmt)))
3102 if (oacc_get_fn_attrib (cfun->decl) != NULL)
3104 error_at (gimple_location (stmt),
3105 "non-OpenACC construct inside of OpenACC routine");
3106 return false;
3108 else
3109 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
3110 if (is_gimple_omp (octx->stmt)
3111 && is_gimple_omp_oacc (octx->stmt))
3113 error_at (gimple_location (stmt),
3114 "non-OpenACC construct inside of OpenACC region");
3115 return false;
3119 if (ctx != NULL)
3121 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
3122 && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
3124 c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
3125 OMP_CLAUSE_DEVICE);
3126 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3128 error_at (gimple_location (stmt),
3129 "OpenMP constructs are not allowed in target region "
3130 "with %<ancestor%>");
3131 return false;
3134 if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
3135 ctx->teams_nested_p = true;
3136 else
3137 ctx->nonteams_nested_p = true;
3139 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
3140 && ctx->outer
3141 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
3142 ctx = ctx->outer;
3143 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3144 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3145 && !ctx->loop_p)
3147 c = NULL_TREE;
3148 if (ctx->order_concurrent
3149 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
3150 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3151 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3153 error_at (gimple_location (stmt),
3154 "OpenMP constructs other than %<parallel%>, %<loop%>"
3155 " or %<simd%> may not be nested inside a region with"
3156 " the %<order(concurrent)%> clause");
3157 return false;
3159 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
3161 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3162 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3164 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
3165 && (ctx->outer == NULL
3166 || !gimple_omp_for_combined_into_p (ctx->stmt)
3167 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
3168 || (gimple_omp_for_kind (ctx->outer->stmt)
3169 != GF_OMP_FOR_KIND_FOR)
3170 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
3172 error_at (gimple_location (stmt),
3173 "%<ordered simd threads%> must be closely "
3174 "nested inside of %<%s simd%> region",
3175 lang_GNU_Fortran () ? "do" : "for");
3176 return false;
3178 return true;
3181 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3182 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
3183 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
3184 return true;
3185 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
3186 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3187 return true;
3188 error_at (gimple_location (stmt),
3189 "OpenMP constructs other than "
3190 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3191 "not be nested inside %<simd%> region");
3192 return false;
3194 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3196 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
3197 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
3198 && omp_find_clause (gimple_omp_for_clauses (stmt),
3199 OMP_CLAUSE_BIND) == NULL_TREE))
3200 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
3202 error_at (gimple_location (stmt),
3203 "only %<distribute%>, %<parallel%> or %<loop%> "
3204 "regions are allowed to be strictly nested inside "
3205 "%<teams%> region");
3206 return false;
3209 else if (ctx->order_concurrent
3210 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3211 && (gimple_code (stmt) != GIMPLE_OMP_FOR
3212 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3213 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3215 if (ctx->loop_p)
3216 error_at (gimple_location (stmt),
3217 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3218 "%<simd%> may not be nested inside a %<loop%> region");
3219 else
3220 error_at (gimple_location (stmt),
3221 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3222 "%<simd%> may not be nested inside a region with "
3223 "the %<order(concurrent)%> clause");
3224 return false;
3227 switch (gimple_code (stmt))
3229 case GIMPLE_OMP_FOR:
3230 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3231 return true;
3232 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3234 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3236 error_at (gimple_location (stmt),
3237 "%<distribute%> region must be strictly nested "
3238 "inside %<teams%> construct");
3239 return false;
3241 return true;
3243 /* We split taskloop into task and nested taskloop in it. */
3244 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3245 return true;
3246 /* For now, hope this will change and loop bind(parallel) will not
3247 be allowed in lots of contexts. */
3248 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3249 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3250 return true;
3251 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3253 bool ok = false;
3255 if (ctx)
3256 switch (gimple_code (ctx->stmt))
3258 case GIMPLE_OMP_FOR:
3259 ok = (gimple_omp_for_kind (ctx->stmt)
3260 == GF_OMP_FOR_KIND_OACC_LOOP);
3261 break;
3263 case GIMPLE_OMP_TARGET:
3264 switch (gimple_omp_target_kind (ctx->stmt))
3266 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3267 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3268 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3269 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3270 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3271 ok = true;
3272 break;
3274 default:
3275 break;
3278 default:
3279 break;
3281 else if (oacc_get_fn_attrib (current_function_decl))
3282 ok = true;
3283 if (!ok)
3285 error_at (gimple_location (stmt),
3286 "OpenACC loop directive must be associated with"
3287 " an OpenACC compute region");
3288 return false;
3291 /* FALLTHRU */
3292 case GIMPLE_CALL:
3293 if (is_gimple_call (stmt)
3294 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3295 == BUILT_IN_GOMP_CANCEL
3296 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3297 == BUILT_IN_GOMP_CANCELLATION_POINT))
3299 const char *bad = NULL;
3300 const char *kind = NULL;
3301 const char *construct
3302 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3303 == BUILT_IN_GOMP_CANCEL)
3304 ? "cancel"
3305 : "cancellation point";
3306 if (ctx == NULL)
3308 error_at (gimple_location (stmt), "orphaned %qs construct",
3309 construct);
3310 return false;
3312 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3313 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3314 : 0)
3316 case 1:
3317 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3318 bad = "parallel";
3319 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3320 == BUILT_IN_GOMP_CANCEL
3321 && !integer_zerop (gimple_call_arg (stmt, 1)))
3322 ctx->cancellable = true;
3323 kind = "parallel";
3324 break;
3325 case 2:
3326 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3327 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3328 bad = "for";
3329 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3330 == BUILT_IN_GOMP_CANCEL
3331 && !integer_zerop (gimple_call_arg (stmt, 1)))
3333 ctx->cancellable = true;
3334 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3335 OMP_CLAUSE_NOWAIT))
3336 warning_at (gimple_location (stmt), 0,
3337 "%<cancel for%> inside "
3338 "%<nowait%> for construct");
3339 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3340 OMP_CLAUSE_ORDERED))
3341 warning_at (gimple_location (stmt), 0,
3342 "%<cancel for%> inside "
3343 "%<ordered%> for construct");
3345 kind = "for";
3346 break;
3347 case 4:
3348 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3349 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3350 bad = "sections";
3351 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3352 == BUILT_IN_GOMP_CANCEL
3353 && !integer_zerop (gimple_call_arg (stmt, 1)))
3355 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3357 ctx->cancellable = true;
3358 if (omp_find_clause (gimple_omp_sections_clauses
3359 (ctx->stmt),
3360 OMP_CLAUSE_NOWAIT))
3361 warning_at (gimple_location (stmt), 0,
3362 "%<cancel sections%> inside "
3363 "%<nowait%> sections construct");
3365 else
3367 gcc_assert (ctx->outer
3368 && gimple_code (ctx->outer->stmt)
3369 == GIMPLE_OMP_SECTIONS);
3370 ctx->outer->cancellable = true;
3371 if (omp_find_clause (gimple_omp_sections_clauses
3372 (ctx->outer->stmt),
3373 OMP_CLAUSE_NOWAIT))
3374 warning_at (gimple_location (stmt), 0,
3375 "%<cancel sections%> inside "
3376 "%<nowait%> sections construct");
3379 kind = "sections";
3380 break;
3381 case 8:
3382 if (!is_task_ctx (ctx)
3383 && (!is_taskloop_ctx (ctx)
3384 || ctx->outer == NULL
3385 || !is_task_ctx (ctx->outer)))
3386 bad = "task";
3387 else
3389 for (omp_context *octx = ctx->outer;
3390 octx; octx = octx->outer)
3392 switch (gimple_code (octx->stmt))
3394 case GIMPLE_OMP_TASKGROUP:
3395 break;
3396 case GIMPLE_OMP_TARGET:
3397 if (gimple_omp_target_kind (octx->stmt)
3398 != GF_OMP_TARGET_KIND_REGION)
3399 continue;
3400 /* FALLTHRU */
3401 case GIMPLE_OMP_PARALLEL:
3402 case GIMPLE_OMP_TEAMS:
3403 error_at (gimple_location (stmt),
3404 "%<%s taskgroup%> construct not closely "
3405 "nested inside of %<taskgroup%> region",
3406 construct);
3407 return false;
3408 case GIMPLE_OMP_TASK:
3409 if (gimple_omp_task_taskloop_p (octx->stmt)
3410 && octx->outer
3411 && is_taskloop_ctx (octx->outer))
3413 tree clauses
3414 = gimple_omp_for_clauses (octx->outer->stmt);
3415 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3416 break;
3418 continue;
3419 default:
3420 continue;
3422 break;
3424 ctx->cancellable = true;
3426 kind = "taskgroup";
3427 break;
3428 default:
3429 error_at (gimple_location (stmt), "invalid arguments");
3430 return false;
3432 if (bad)
3434 error_at (gimple_location (stmt),
3435 "%<%s %s%> construct not closely nested inside of %qs",
3436 construct, kind, bad);
3437 return false;
3440 /* FALLTHRU */
3441 case GIMPLE_OMP_SECTIONS:
3442 case GIMPLE_OMP_SINGLE:
3443 for (; ctx != NULL; ctx = ctx->outer)
3444 switch (gimple_code (ctx->stmt))
3446 case GIMPLE_OMP_FOR:
3447 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3448 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3449 break;
3450 /* FALLTHRU */
3451 case GIMPLE_OMP_SECTIONS:
3452 case GIMPLE_OMP_SINGLE:
3453 case GIMPLE_OMP_ORDERED:
3454 case GIMPLE_OMP_MASTER:
3455 case GIMPLE_OMP_MASKED:
3456 case GIMPLE_OMP_TASK:
3457 case GIMPLE_OMP_CRITICAL:
3458 if (is_gimple_call (stmt))
3460 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3461 != BUILT_IN_GOMP_BARRIER)
3462 return true;
3463 error_at (gimple_location (stmt),
3464 "barrier region may not be closely nested inside "
3465 "of work-sharing, %<loop%>, %<critical%>, "
3466 "%<ordered%>, %<master%>, %<masked%>, explicit "
3467 "%<task%> or %<taskloop%> region");
3468 return false;
3470 error_at (gimple_location (stmt),
3471 "work-sharing region may not be closely nested inside "
3472 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3473 "%<master%>, %<masked%>, explicit %<task%> or "
3474 "%<taskloop%> region");
3475 return false;
3476 case GIMPLE_OMP_PARALLEL:
3477 case GIMPLE_OMP_TEAMS:
3478 return true;
3479 case GIMPLE_OMP_TARGET:
3480 if (gimple_omp_target_kind (ctx->stmt)
3481 == GF_OMP_TARGET_KIND_REGION)
3482 return true;
3483 break;
3484 default:
3485 break;
3487 break;
3488 case GIMPLE_OMP_MASTER:
3489 case GIMPLE_OMP_MASKED:
3490 for (; ctx != NULL; ctx = ctx->outer)
3491 switch (gimple_code (ctx->stmt))
3493 case GIMPLE_OMP_FOR:
3494 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3495 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3496 break;
3497 /* FALLTHRU */
3498 case GIMPLE_OMP_SECTIONS:
3499 case GIMPLE_OMP_SINGLE:
3500 case GIMPLE_OMP_TASK:
3501 error_at (gimple_location (stmt),
3502 "%qs region may not be closely nested inside "
3503 "of work-sharing, %<loop%>, explicit %<task%> or "
3504 "%<taskloop%> region",
3505 gimple_code (stmt) == GIMPLE_OMP_MASTER
3506 ? "master" : "masked");
3507 return false;
3508 case GIMPLE_OMP_PARALLEL:
3509 case GIMPLE_OMP_TEAMS:
3510 return true;
3511 case GIMPLE_OMP_TARGET:
3512 if (gimple_omp_target_kind (ctx->stmt)
3513 == GF_OMP_TARGET_KIND_REGION)
3514 return true;
3515 break;
3516 default:
3517 break;
3519 break;
3520 case GIMPLE_OMP_SCOPE:
3521 for (; ctx != NULL; ctx = ctx->outer)
3522 switch (gimple_code (ctx->stmt))
3524 case GIMPLE_OMP_FOR:
3525 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3526 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3527 break;
3528 /* FALLTHRU */
3529 case GIMPLE_OMP_SECTIONS:
3530 case GIMPLE_OMP_SINGLE:
3531 case GIMPLE_OMP_TASK:
3532 case GIMPLE_OMP_CRITICAL:
3533 case GIMPLE_OMP_ORDERED:
3534 case GIMPLE_OMP_MASTER:
3535 case GIMPLE_OMP_MASKED:
3536 error_at (gimple_location (stmt),
3537 "%<scope%> region may not be closely nested inside "
3538 "of work-sharing, %<loop%>, explicit %<task%>, "
3539 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3540 "or %<masked%> region");
3541 return false;
3542 case GIMPLE_OMP_PARALLEL:
3543 case GIMPLE_OMP_TEAMS:
3544 return true;
3545 case GIMPLE_OMP_TARGET:
3546 if (gimple_omp_target_kind (ctx->stmt)
3547 == GF_OMP_TARGET_KIND_REGION)
3548 return true;
3549 break;
3550 default:
3551 break;
3553 break;
3554 case GIMPLE_OMP_TASK:
3555 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3556 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3557 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3558 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3560 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3561 error_at (OMP_CLAUSE_LOCATION (c),
3562 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3563 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3564 return false;
3566 break;
3567 case GIMPLE_OMP_ORDERED:
3568 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3569 c; c = OMP_CLAUSE_CHAIN (c))
3571 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3573 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3574 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3575 continue;
3577 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3578 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3579 || kind == OMP_CLAUSE_DEPEND_SINK)
3581 tree oclause;
3582 /* Look for containing ordered(N) loop. */
3583 if (ctx == NULL
3584 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3585 || (oclause
3586 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3587 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3589 error_at (OMP_CLAUSE_LOCATION (c),
3590 "%<ordered%> construct with %<depend%> clause "
3591 "must be closely nested inside an %<ordered%> "
3592 "loop");
3593 return false;
3595 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3597 error_at (OMP_CLAUSE_LOCATION (c),
3598 "%<ordered%> construct with %<depend%> clause "
3599 "must be closely nested inside a loop with "
3600 "%<ordered%> clause with a parameter");
3601 return false;
3604 else
3606 error_at (OMP_CLAUSE_LOCATION (c),
3607 "invalid depend kind in omp %<ordered%> %<depend%>");
3608 return false;
3611 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3612 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3614 /* ordered simd must be closely nested inside of simd region,
3615 and simd region must not encounter constructs other than
3616 ordered simd, therefore ordered simd may be either orphaned,
3617 or ctx->stmt must be simd. The latter case is handled already
3618 earlier. */
3619 if (ctx != NULL)
3621 error_at (gimple_location (stmt),
3622 "%<ordered%> %<simd%> must be closely nested inside "
3623 "%<simd%> region");
3624 return false;
3627 for (; ctx != NULL; ctx = ctx->outer)
3628 switch (gimple_code (ctx->stmt))
3630 case GIMPLE_OMP_CRITICAL:
3631 case GIMPLE_OMP_TASK:
3632 case GIMPLE_OMP_ORDERED:
3633 ordered_in_taskloop:
3634 error_at (gimple_location (stmt),
3635 "%<ordered%> region may not be closely nested inside "
3636 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3637 "%<taskloop%> region");
3638 return false;
3639 case GIMPLE_OMP_FOR:
3640 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3641 goto ordered_in_taskloop;
3642 tree o;
3643 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3644 OMP_CLAUSE_ORDERED);
3645 if (o == NULL)
3647 error_at (gimple_location (stmt),
3648 "%<ordered%> region must be closely nested inside "
3649 "a loop region with an %<ordered%> clause");
3650 return false;
3652 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3653 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3655 error_at (gimple_location (stmt),
3656 "%<ordered%> region without %<depend%> clause may "
3657 "not be closely nested inside a loop region with "
3658 "an %<ordered%> clause with a parameter");
3659 return false;
3661 return true;
3662 case GIMPLE_OMP_TARGET:
3663 if (gimple_omp_target_kind (ctx->stmt)
3664 != GF_OMP_TARGET_KIND_REGION)
3665 break;
3666 /* FALLTHRU */
3667 case GIMPLE_OMP_PARALLEL:
3668 case GIMPLE_OMP_TEAMS:
3669 error_at (gimple_location (stmt),
3670 "%<ordered%> region must be closely nested inside "
3671 "a loop region with an %<ordered%> clause");
3672 return false;
3673 default:
3674 break;
3676 break;
3677 case GIMPLE_OMP_CRITICAL:
3679 tree this_stmt_name
3680 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3681 for (; ctx != NULL; ctx = ctx->outer)
3682 if (gomp_critical *other_crit
3683 = dyn_cast <gomp_critical *> (ctx->stmt))
3684 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3686 error_at (gimple_location (stmt),
3687 "%<critical%> region may not be nested inside "
3688 "a %<critical%> region with the same name");
3689 return false;
3692 break;
3693 case GIMPLE_OMP_TEAMS:
3694 if (ctx == NULL)
3695 break;
3696 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3697 || (gimple_omp_target_kind (ctx->stmt)
3698 != GF_OMP_TARGET_KIND_REGION))
3700 /* Teams construct can appear either strictly nested inside of
3701 target construct with no intervening stmts, or can be encountered
3702 only by initial task (so must not appear inside any OpenMP
3703 construct. */
3704 error_at (gimple_location (stmt),
3705 "%<teams%> construct must be closely nested inside of "
3706 "%<target%> construct or not nested in any OpenMP "
3707 "construct");
3708 return false;
3710 break;
3711 case GIMPLE_OMP_TARGET:
3712 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3713 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3714 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3715 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3717 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3718 error_at (OMP_CLAUSE_LOCATION (c),
3719 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3720 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3721 return false;
3723 if (is_gimple_omp_offloaded (stmt)
3724 && oacc_get_fn_attrib (cfun->decl) != NULL)
3726 error_at (gimple_location (stmt),
3727 "OpenACC region inside of OpenACC routine, nested "
3728 "parallelism not supported yet");
3729 return false;
3731 for (; ctx != NULL; ctx = ctx->outer)
3733 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3735 if (is_gimple_omp (stmt)
3736 && is_gimple_omp_oacc (stmt)
3737 && is_gimple_omp (ctx->stmt))
3739 error_at (gimple_location (stmt),
3740 "OpenACC construct inside of non-OpenACC region");
3741 return false;
3743 continue;
3746 const char *stmt_name, *ctx_stmt_name;
3747 switch (gimple_omp_target_kind (stmt))
3749 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3750 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3751 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3752 case GF_OMP_TARGET_KIND_ENTER_DATA:
3753 stmt_name = "target enter data"; break;
3754 case GF_OMP_TARGET_KIND_EXIT_DATA:
3755 stmt_name = "target exit data"; break;
3756 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3757 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3758 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3759 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3760 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3761 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
3762 stmt_name = "enter data"; break;
3763 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
3764 stmt_name = "exit data"; break;
3765 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3766 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3767 break;
3768 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3769 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3770 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3771 /* OpenACC 'kernels' decomposed parts. */
3772 stmt_name = "kernels"; break;
3773 default: gcc_unreachable ();
3775 switch (gimple_omp_target_kind (ctx->stmt))
3777 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3778 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3779 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3780 ctx_stmt_name = "parallel"; break;
3781 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3782 ctx_stmt_name = "kernels"; break;
3783 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3784 ctx_stmt_name = "serial"; break;
3785 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3786 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3787 ctx_stmt_name = "host_data"; break;
3788 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3789 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3790 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3791 /* OpenACC 'kernels' decomposed parts. */
3792 ctx_stmt_name = "kernels"; break;
3793 default: gcc_unreachable ();
3796 /* OpenACC/OpenMP mismatch? */
3797 if (is_gimple_omp_oacc (stmt)
3798 != is_gimple_omp_oacc (ctx->stmt))
3800 error_at (gimple_location (stmt),
3801 "%s %qs construct inside of %s %qs region",
3802 (is_gimple_omp_oacc (stmt)
3803 ? "OpenACC" : "OpenMP"), stmt_name,
3804 (is_gimple_omp_oacc (ctx->stmt)
3805 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3806 return false;
3808 if (is_gimple_omp_offloaded (ctx->stmt))
3810 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3811 if (is_gimple_omp_oacc (ctx->stmt))
3813 error_at (gimple_location (stmt),
3814 "%qs construct inside of %qs region",
3815 stmt_name, ctx_stmt_name);
3816 return false;
3818 else
3820 warning_at (gimple_location (stmt), 0,
3821 "%qs construct inside of %qs region",
3822 stmt_name, ctx_stmt_name);
3826 break;
3827 default:
3828 break;
3830 return true;
3834 /* Helper function scan_omp.
3836 Callback for walk_tree or operators in walk_gimple_stmt used to
3837 scan for OMP directives in TP. */
3839 static tree
3840 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3842 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3843 omp_context *ctx = (omp_context *) wi->info;
3844 tree t = *tp;
3846 switch (TREE_CODE (t))
3848 case VAR_DECL:
3849 case PARM_DECL:
3850 case LABEL_DECL:
3851 case RESULT_DECL:
3852 if (ctx)
3854 tree repl = remap_decl (t, &ctx->cb);
3855 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3856 *tp = repl;
3858 break;
3860 default:
3861 if (ctx && TYPE_P (t))
3862 *tp = remap_type (t, &ctx->cb);
3863 else if (!DECL_P (t))
3865 *walk_subtrees = 1;
3866 if (ctx)
3868 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3869 if (tem != TREE_TYPE (t))
3871 if (TREE_CODE (t) == INTEGER_CST)
3872 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3873 else
3874 TREE_TYPE (t) = tem;
3878 break;
3881 return NULL_TREE;
3884 /* Return true if FNDECL is a setjmp or a longjmp. */
3886 static bool
3887 setjmp_or_longjmp_p (const_tree fndecl)
3889 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3890 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3891 return true;
3893 tree declname = DECL_NAME (fndecl);
3894 if (!declname
3895 || (DECL_CONTEXT (fndecl) != NULL_TREE
3896 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3897 || !TREE_PUBLIC (fndecl))
3898 return false;
3900 const char *name = IDENTIFIER_POINTER (declname);
3901 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3904 /* Return true if FNDECL is an omp_* runtime API call. */
3906 static bool
3907 omp_runtime_api_call (const_tree fndecl)
3909 tree declname = DECL_NAME (fndecl);
3910 if (!declname
3911 || (DECL_CONTEXT (fndecl) != NULL_TREE
3912 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3913 || !TREE_PUBLIC (fndecl))
3914 return false;
3916 const char *name = IDENTIFIER_POINTER (declname);
3917 if (!startswith (name, "omp_"))
3918 return false;
3920 static const char *omp_runtime_apis[] =
3922 /* This array has 3 sections. First omp_* calls that don't
3923 have any suffixes. */
3924 "aligned_alloc",
3925 "aligned_calloc",
3926 "alloc",
3927 "calloc",
3928 "free",
3929 "realloc",
3930 "target_alloc",
3931 "target_associate_ptr",
3932 "target_disassociate_ptr",
3933 "target_free",
3934 "target_is_present",
3935 "target_memcpy",
3936 "target_memcpy_rect",
3937 NULL,
3938 /* Now omp_* calls that are available as omp_* and omp_*_. */
3939 "capture_affinity",
3940 "destroy_allocator",
3941 "destroy_lock",
3942 "destroy_nest_lock",
3943 "display_affinity",
3944 "fulfill_event",
3945 "get_active_level",
3946 "get_affinity_format",
3947 "get_cancellation",
3948 "get_default_allocator",
3949 "get_default_device",
3950 "get_device_num",
3951 "get_dynamic",
3952 "get_initial_device",
3953 "get_level",
3954 "get_max_active_levels",
3955 "get_max_task_priority",
3956 "get_max_teams",
3957 "get_max_threads",
3958 "get_nested",
3959 "get_num_devices",
3960 "get_num_places",
3961 "get_num_procs",
3962 "get_num_teams",
3963 "get_num_threads",
3964 "get_partition_num_places",
3965 "get_place_num",
3966 "get_proc_bind",
3967 "get_supported_active_levels",
3968 "get_team_num",
3969 "get_teams_thread_limit",
3970 "get_thread_limit",
3971 "get_thread_num",
3972 "get_wtick",
3973 "get_wtime",
3974 "in_final",
3975 "in_parallel",
3976 "init_lock",
3977 "init_nest_lock",
3978 "is_initial_device",
3979 "pause_resource",
3980 "pause_resource_all",
3981 "set_affinity_format",
3982 "set_default_allocator",
3983 "set_lock",
3984 "set_nest_lock",
3985 "test_lock",
3986 "test_nest_lock",
3987 "unset_lock",
3988 "unset_nest_lock",
3989 NULL,
3990 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3991 "display_env",
3992 "get_ancestor_thread_num",
3993 "init_allocator",
3994 "get_partition_place_nums",
3995 "get_place_num_procs",
3996 "get_place_proc_ids",
3997 "get_schedule",
3998 "get_team_size",
3999 "set_default_device",
4000 "set_dynamic",
4001 "set_max_active_levels",
4002 "set_nested",
4003 "set_num_teams",
4004 "set_num_threads",
4005 "set_schedule",
4006 "set_teams_thread_limit"
4009 int mode = 0;
4010 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
4012 if (omp_runtime_apis[i] == NULL)
4014 mode++;
4015 continue;
4017 size_t len = strlen (omp_runtime_apis[i]);
4018 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
4019 && (name[4 + len] == '\0'
4020 || (mode > 0
4021 && name[4 + len] == '_'
4022 && (name[4 + len + 1] == '\0'
4023 || (mode > 1
4024 && strcmp (name + 4 + len + 1, "8_") == 0)))))
4025 return true;
4027 return false;
4030 /* Helper function for scan_omp.
4032 Callback for walk_gimple_stmt used to scan for OMP directives in
4033 the current statement in GSI. */
4035 static tree
4036 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
4037 struct walk_stmt_info *wi)
4039 gimple *stmt = gsi_stmt (*gsi);
4040 omp_context *ctx = (omp_context *) wi->info;
4042 if (gimple_has_location (stmt))
4043 input_location = gimple_location (stmt);
4045 /* Check the nesting restrictions. */
4046 bool remove = false;
4047 if (is_gimple_omp (stmt))
4048 remove = !check_omp_nesting_restrictions (stmt, ctx);
4049 else if (is_gimple_call (stmt))
4051 tree fndecl = gimple_call_fndecl (stmt);
4052 if (fndecl)
4054 if (ctx
4055 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4056 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
4057 && setjmp_or_longjmp_p (fndecl)
4058 && !ctx->loop_p)
4060 remove = true;
4061 error_at (gimple_location (stmt),
4062 "setjmp/longjmp inside %<simd%> construct");
4064 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4065 switch (DECL_FUNCTION_CODE (fndecl))
4067 case BUILT_IN_GOMP_BARRIER:
4068 case BUILT_IN_GOMP_CANCEL:
4069 case BUILT_IN_GOMP_CANCELLATION_POINT:
4070 case BUILT_IN_GOMP_TASKYIELD:
4071 case BUILT_IN_GOMP_TASKWAIT:
4072 case BUILT_IN_GOMP_TASKGROUP_START:
4073 case BUILT_IN_GOMP_TASKGROUP_END:
4074 remove = !check_omp_nesting_restrictions (stmt, ctx);
4075 break;
4076 default:
4077 break;
4079 else if (ctx)
4081 omp_context *octx = ctx;
4082 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
4083 octx = ctx->outer;
4084 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
4086 remove = true;
4087 error_at (gimple_location (stmt),
4088 "OpenMP runtime API call %qD in a region with "
4089 "%<order(concurrent)%> clause", fndecl);
4091 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
4092 && (gimple_omp_target_kind (ctx->stmt)
4093 == GF_OMP_TARGET_KIND_REGION))
4095 tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
4096 tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
4097 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
4098 error_at (gimple_location (stmt),
4099 "OpenMP runtime API call %qD in a region with "
4100 "%<device(ancestor)%> clause", fndecl);
4105 if (remove)
4107 stmt = gimple_build_nop ();
4108 gsi_replace (gsi, stmt, false);
4111 *handled_ops_p = true;
4113 switch (gimple_code (stmt))
4115 case GIMPLE_OMP_PARALLEL:
4116 taskreg_nesting_level++;
4117 scan_omp_parallel (gsi, ctx);
4118 taskreg_nesting_level--;
4119 break;
4121 case GIMPLE_OMP_TASK:
4122 taskreg_nesting_level++;
4123 scan_omp_task (gsi, ctx);
4124 taskreg_nesting_level--;
4125 break;
4127 case GIMPLE_OMP_FOR:
4128 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4129 == GF_OMP_FOR_KIND_SIMD)
4130 && gimple_omp_for_combined_into_p (stmt)
4131 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
4133 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
4134 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
4135 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
4137 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
4138 break;
4141 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4142 == GF_OMP_FOR_KIND_SIMD)
4143 && omp_maybe_offloaded_ctx (ctx)
4144 && omp_max_simt_vf ()
4145 && gimple_omp_for_collapse (stmt) == 1)
4146 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
4147 else
4148 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
4149 break;
4151 case GIMPLE_OMP_SCOPE:
4152 ctx = new_omp_context (stmt, ctx);
4153 scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
4154 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4155 break;
4157 case GIMPLE_OMP_SECTIONS:
4158 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
4159 break;
4161 case GIMPLE_OMP_SINGLE:
4162 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
4163 break;
4165 case GIMPLE_OMP_SCAN:
4166 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
4168 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
4169 ctx->scan_inclusive = true;
4170 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
4171 ctx->scan_exclusive = true;
4173 /* FALLTHRU */
4174 case GIMPLE_OMP_SECTION:
4175 case GIMPLE_OMP_MASTER:
4176 case GIMPLE_OMP_ORDERED:
4177 case GIMPLE_OMP_CRITICAL:
4178 ctx = new_omp_context (stmt, ctx);
4179 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4180 break;
4182 case GIMPLE_OMP_MASKED:
4183 ctx = new_omp_context (stmt, ctx);
4184 scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
4185 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4186 break;
4188 case GIMPLE_OMP_TASKGROUP:
4189 ctx = new_omp_context (stmt, ctx);
4190 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
4191 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4192 break;
4194 case GIMPLE_OMP_TARGET:
4195 if (is_gimple_omp_offloaded (stmt))
4197 taskreg_nesting_level++;
4198 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4199 taskreg_nesting_level--;
4201 else
4202 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4203 break;
4205 case GIMPLE_OMP_TEAMS:
4206 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4208 taskreg_nesting_level++;
4209 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4210 taskreg_nesting_level--;
4212 else
4213 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4214 break;
4216 case GIMPLE_BIND:
4218 tree var;
4220 *handled_ops_p = false;
4221 if (ctx)
4222 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4223 var ;
4224 var = DECL_CHAIN (var))
4225 insert_decl_map (&ctx->cb, var, var);
4227 break;
4228 default:
4229 *handled_ops_p = false;
4230 break;
4233 return NULL_TREE;
4237 /* Scan all the statements starting at the current statement. CTX
4238 contains context information about the OMP directives and
4239 clauses found during the scan. */
4241 static void
4242 scan_omp (gimple_seq *body_p, omp_context *ctx)
4244 location_t saved_location;
4245 struct walk_stmt_info wi;
4247 memset (&wi, 0, sizeof (wi));
4248 wi.info = ctx;
4249 wi.want_locations = true;
4251 saved_location = input_location;
4252 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4253 input_location = saved_location;
4256 /* Re-gimplification and code generation routines. */
4258 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4259 of BIND if in a method. */
4261 static void
4262 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4264 if (DECL_ARGUMENTS (current_function_decl)
4265 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4266 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4267 == POINTER_TYPE))
4269 tree vars = gimple_bind_vars (bind);
4270 for (tree *pvar = &vars; *pvar; )
4271 if (omp_member_access_dummy_var (*pvar))
4272 *pvar = DECL_CHAIN (*pvar);
4273 else
4274 pvar = &DECL_CHAIN (*pvar);
4275 gimple_bind_set_vars (bind, vars);
4279 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4280 block and its subblocks. */
4282 static void
4283 remove_member_access_dummy_vars (tree block)
4285 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4286 if (omp_member_access_dummy_var (*pvar))
4287 *pvar = DECL_CHAIN (*pvar);
4288 else
4289 pvar = &DECL_CHAIN (*pvar);
4291 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4292 remove_member_access_dummy_vars (block);
4295 /* If a context was created for STMT when it was scanned, return it. */
4297 static omp_context *
4298 maybe_lookup_ctx (gimple *stmt)
4300 splay_tree_node n;
4301 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4302 return n ? (omp_context *) n->value : NULL;
4306 /* Find the mapping for DECL in CTX or the immediately enclosing
4307 context that has a mapping for DECL.
4309 If CTX is a nested parallel directive, we may have to use the decl
4310 mappings created in CTX's parent context. Suppose that we have the
4311 following parallel nesting (variable UIDs showed for clarity):
4313 iD.1562 = 0;
4314 #omp parallel shared(iD.1562) -> outer parallel
4315 iD.1562 = iD.1562 + 1;
4317 #omp parallel shared (iD.1562) -> inner parallel
4318 iD.1562 = iD.1562 - 1;
4320 Each parallel structure will create a distinct .omp_data_s structure
4321 for copying iD.1562 in/out of the directive:
4323 outer parallel .omp_data_s.1.i -> iD.1562
4324 inner parallel .omp_data_s.2.i -> iD.1562
4326 A shared variable mapping will produce a copy-out operation before
4327 the parallel directive and a copy-in operation after it. So, in
4328 this case we would have:
4330 iD.1562 = 0;
4331 .omp_data_o.1.i = iD.1562;
4332 #omp parallel shared(iD.1562) -> outer parallel
4333 .omp_data_i.1 = &.omp_data_o.1
4334 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4336 .omp_data_o.2.i = iD.1562; -> **
4337 #omp parallel shared(iD.1562) -> inner parallel
4338 .omp_data_i.2 = &.omp_data_o.2
4339 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4342 ** This is a problem. The symbol iD.1562 cannot be referenced
4343 inside the body of the outer parallel region. But since we are
4344 emitting this copy operation while expanding the inner parallel
4345 directive, we need to access the CTX structure of the outer
4346 parallel directive to get the correct mapping:
4348 .omp_data_o.2.i = .omp_data_i.1->i
4350 Since there may be other workshare or parallel directives enclosing
4351 the parallel directive, it may be necessary to walk up the context
4352 parent chain. This is not a problem in general because nested
4353 parallelism happens only rarely. */
4355 static tree
4356 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4358 tree t;
4359 omp_context *up;
4361 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4362 t = maybe_lookup_decl (decl, up);
4364 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4366 return t ? t : decl;
4370 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4371 in outer contexts. */
4373 static tree
4374 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4376 tree t = NULL;
4377 omp_context *up;
4379 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4380 t = maybe_lookup_decl (decl, up);
4382 return t ? t : decl;
4386 /* Construct the initialization value for reduction operation OP. */
4388 tree
4389 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4391 switch (op)
4393 case PLUS_EXPR:
4394 case MINUS_EXPR:
4395 case BIT_IOR_EXPR:
4396 case BIT_XOR_EXPR:
4397 case TRUTH_OR_EXPR:
4398 case TRUTH_ORIF_EXPR:
4399 case TRUTH_XOR_EXPR:
4400 case NE_EXPR:
4401 return build_zero_cst (type);
4403 case MULT_EXPR:
4404 case TRUTH_AND_EXPR:
4405 case TRUTH_ANDIF_EXPR:
4406 case EQ_EXPR:
4407 return fold_convert_loc (loc, type, integer_one_node);
4409 case BIT_AND_EXPR:
4410 return fold_convert_loc (loc, type, integer_minus_one_node);
4412 case MAX_EXPR:
4413 if (SCALAR_FLOAT_TYPE_P (type))
4415 REAL_VALUE_TYPE max, min;
4416 if (HONOR_INFINITIES (type))
4418 real_inf (&max);
4419 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4421 else
4422 real_maxval (&min, 1, TYPE_MODE (type));
4423 return build_real (type, min);
4425 else if (POINTER_TYPE_P (type))
4427 wide_int min
4428 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4429 return wide_int_to_tree (type, min);
4431 else
4433 gcc_assert (INTEGRAL_TYPE_P (type));
4434 return TYPE_MIN_VALUE (type);
4437 case MIN_EXPR:
4438 if (SCALAR_FLOAT_TYPE_P (type))
4440 REAL_VALUE_TYPE max;
4441 if (HONOR_INFINITIES (type))
4442 real_inf (&max);
4443 else
4444 real_maxval (&max, 0, TYPE_MODE (type));
4445 return build_real (type, max);
4447 else if (POINTER_TYPE_P (type))
4449 wide_int max
4450 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4451 return wide_int_to_tree (type, max);
4453 else
4455 gcc_assert (INTEGRAL_TYPE_P (type));
4456 return TYPE_MAX_VALUE (type);
4459 default:
4460 gcc_unreachable ();
4464 /* Construct the initialization value for reduction CLAUSE. */
4466 tree
4467 omp_reduction_init (tree clause, tree type)
4469 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4470 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4473 /* Return alignment to be assumed for var in CLAUSE, which should be
4474 OMP_CLAUSE_ALIGNED. */
4476 static tree
4477 omp_clause_aligned_alignment (tree clause)
4479 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4480 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4482 /* Otherwise return implementation defined alignment. */
4483 unsigned int al = 1;
4484 opt_scalar_mode mode_iter;
4485 auto_vector_modes modes;
4486 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4487 static enum mode_class classes[]
4488 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4489 for (int i = 0; i < 4; i += 2)
4490 /* The for loop above dictates that we only walk through scalar classes. */
4491 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4493 scalar_mode mode = mode_iter.require ();
4494 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4495 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4496 continue;
4497 machine_mode alt_vmode;
4498 for (unsigned int j = 0; j < modes.length (); ++j)
4499 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4500 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4501 vmode = alt_vmode;
4503 tree type = lang_hooks.types.type_for_mode (mode, 1);
4504 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4505 continue;
4506 type = build_vector_type_for_mode (type, vmode);
4507 if (TYPE_MODE (type) != vmode)
4508 continue;
4509 if (TYPE_ALIGN_UNIT (type) > al)
4510 al = TYPE_ALIGN_UNIT (type);
4512 return build_int_cst (integer_type_node, al);
4516 /* This structure is part of the interface between lower_rec_simd_input_clauses
4517 and lower_rec_input_clauses. */
4519 class omplow_simd_context {
4520 public:
4521 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4522 tree idx;
4523 tree lane;
4524 tree lastlane;
4525 vec<tree, va_heap> simt_eargs;
4526 gimple_seq simt_dlist;
4527 poly_uint64_pod max_vf;
4528 bool is_simt;
4531 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4532 privatization. */
4534 static bool
4535 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4536 omplow_simd_context *sctx, tree &ivar,
4537 tree &lvar, tree *rvar = NULL,
4538 tree *rvar2 = NULL)
4540 if (known_eq (sctx->max_vf, 0U))
4542 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4543 if (maybe_gt (sctx->max_vf, 1U))
4545 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4546 OMP_CLAUSE_SAFELEN);
4547 if (c)
4549 poly_uint64 safe_len;
4550 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4551 || maybe_lt (safe_len, 1U))
4552 sctx->max_vf = 1;
4553 else
4554 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4557 if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
4559 for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
4560 c = OMP_CLAUSE_CHAIN (c))
4562 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4563 continue;
4565 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4567 /* UDR reductions are not supported yet for SIMT, disable
4568 SIMT. */
4569 sctx->max_vf = 1;
4570 break;
4573 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
4574 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
4576 /* Doing boolean operations on non-integral types is
4577 for conformance only, it's not worth supporting this
4578 for SIMT. */
4579 sctx->max_vf = 1;
4580 break;
4584 if (maybe_gt (sctx->max_vf, 1U))
4586 sctx->idx = create_tmp_var (unsigned_type_node);
4587 sctx->lane = create_tmp_var (unsigned_type_node);
4590 if (known_eq (sctx->max_vf, 1U))
4591 return false;
4593 if (sctx->is_simt)
4595 if (is_gimple_reg (new_var))
4597 ivar = lvar = new_var;
4598 return true;
4600 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4601 ivar = lvar = create_tmp_var (type);
4602 TREE_ADDRESSABLE (ivar) = 1;
4603 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4604 NULL, DECL_ATTRIBUTES (ivar));
4605 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4606 tree clobber = build_clobber (type);
4607 gimple *g = gimple_build_assign (ivar, clobber);
4608 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4610 else
4612 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4613 tree avar = create_tmp_var_raw (atype);
4614 if (TREE_ADDRESSABLE (new_var))
4615 TREE_ADDRESSABLE (avar) = 1;
4616 DECL_ATTRIBUTES (avar)
4617 = tree_cons (get_identifier ("omp simd array"), NULL,
4618 DECL_ATTRIBUTES (avar));
4619 gimple_add_tmp_var (avar);
4620 tree iavar = avar;
4621 if (rvar && !ctx->for_simd_scan_phase)
4623 /* For inscan reductions, create another array temporary,
4624 which will hold the reduced value. */
4625 iavar = create_tmp_var_raw (atype);
4626 if (TREE_ADDRESSABLE (new_var))
4627 TREE_ADDRESSABLE (iavar) = 1;
4628 DECL_ATTRIBUTES (iavar)
4629 = tree_cons (get_identifier ("omp simd array"), NULL,
4630 tree_cons (get_identifier ("omp simd inscan"), NULL,
4631 DECL_ATTRIBUTES (iavar)));
4632 gimple_add_tmp_var (iavar);
4633 ctx->cb.decl_map->put (avar, iavar);
4634 if (sctx->lastlane == NULL_TREE)
4635 sctx->lastlane = create_tmp_var (unsigned_type_node);
4636 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4637 sctx->lastlane, NULL_TREE, NULL_TREE);
4638 TREE_THIS_NOTRAP (*rvar) = 1;
4640 if (ctx->scan_exclusive)
4642 /* And for exclusive scan yet another one, which will
4643 hold the value during the scan phase. */
4644 tree savar = create_tmp_var_raw (atype);
4645 if (TREE_ADDRESSABLE (new_var))
4646 TREE_ADDRESSABLE (savar) = 1;
4647 DECL_ATTRIBUTES (savar)
4648 = tree_cons (get_identifier ("omp simd array"), NULL,
4649 tree_cons (get_identifier ("omp simd inscan "
4650 "exclusive"), NULL,
4651 DECL_ATTRIBUTES (savar)));
4652 gimple_add_tmp_var (savar);
4653 ctx->cb.decl_map->put (iavar, savar);
4654 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4655 sctx->idx, NULL_TREE, NULL_TREE);
4656 TREE_THIS_NOTRAP (*rvar2) = 1;
4659 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4660 NULL_TREE, NULL_TREE);
4661 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4662 NULL_TREE, NULL_TREE);
4663 TREE_THIS_NOTRAP (ivar) = 1;
4664 TREE_THIS_NOTRAP (lvar) = 1;
4666 if (DECL_P (new_var))
4668 SET_DECL_VALUE_EXPR (new_var, lvar);
4669 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4671 return true;
4674 /* Helper function of lower_rec_input_clauses. For a reference
4675 in simd reduction, add an underlying variable it will reference. */
4677 static void
4678 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4680 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4681 if (TREE_CONSTANT (z))
4683 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4684 get_name (new_vard));
4685 gimple_add_tmp_var (z);
4686 TREE_ADDRESSABLE (z) = 1;
4687 z = build_fold_addr_expr_loc (loc, z);
4688 gimplify_assign (new_vard, z, ilist);
4692 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4693 code to emit (type) (tskred_temp[idx]). */
4695 static tree
4696 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4697 unsigned idx)
4699 unsigned HOST_WIDE_INT sz
4700 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4701 tree r = build2 (MEM_REF, pointer_sized_int_node,
4702 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4703 idx * sz));
4704 tree v = create_tmp_var (pointer_sized_int_node);
4705 gimple *g = gimple_build_assign (v, r);
4706 gimple_seq_add_stmt (ilist, g);
4707 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4709 v = create_tmp_var (type);
4710 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4711 gimple_seq_add_stmt (ilist, g);
4713 return v;
4716 /* Lower early initialization of privatized variable NEW_VAR
4717 if it needs an allocator (has allocate clause). */
4719 static bool
4720 lower_private_allocate (tree var, tree new_var, tree &allocator,
4721 tree &allocate_ptr, gimple_seq *ilist,
4722 omp_context *ctx, bool is_ref, tree size)
4724 if (allocator)
4725 return false;
4726 gcc_assert (allocate_ptr == NULL_TREE);
4727 if (ctx->allocate_map
4728 && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
4729 if (tree *allocatorp = ctx->allocate_map->get (var))
4730 allocator = *allocatorp;
4731 if (allocator == NULL_TREE)
4732 return false;
4733 if (!is_ref && omp_privatize_by_reference (var))
4735 allocator = NULL_TREE;
4736 return false;
4739 unsigned HOST_WIDE_INT ialign = 0;
4740 if (TREE_CODE (allocator) == TREE_LIST)
4742 ialign = tree_to_uhwi (TREE_VALUE (allocator));
4743 allocator = TREE_PURPOSE (allocator);
4745 if (TREE_CODE (allocator) != INTEGER_CST)
4746 allocator = build_outer_var_ref (allocator, ctx);
4747 allocator = fold_convert (pointer_sized_int_node, allocator);
4748 if (TREE_CODE (allocator) != INTEGER_CST)
4750 tree var = create_tmp_var (TREE_TYPE (allocator));
4751 gimplify_assign (var, allocator, ilist);
4752 allocator = var;
4755 tree ptr_type, align, sz = size;
4756 if (TYPE_P (new_var))
4758 ptr_type = build_pointer_type (new_var);
4759 ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
4761 else if (is_ref)
4763 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4764 ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4766 else
4768 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4769 ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
4770 if (sz == NULL_TREE)
4771 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
4773 align = build_int_cst (size_type_node, ialign);
4774 if (TREE_CODE (sz) != INTEGER_CST)
4776 tree szvar = create_tmp_var (size_type_node);
4777 gimplify_assign (szvar, sz, ilist);
4778 sz = szvar;
4780 allocate_ptr = create_tmp_var (ptr_type);
4781 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4782 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4783 gimple_call_set_lhs (g, allocate_ptr);
4784 gimple_seq_add_stmt (ilist, g);
4785 if (!is_ref)
4787 tree x = build_simple_mem_ref (allocate_ptr);
4788 TREE_THIS_NOTRAP (x) = 1;
4789 SET_DECL_VALUE_EXPR (new_var, x);
4790 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4792 return true;
4795 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4796 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4797 private variables. Initialization statements go in ILIST, while calls
4798 to destructors go in DLIST. */
4800 static void
4801 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4802 omp_context *ctx, struct omp_for_data *fd)
4804 tree c, copyin_seq, x, ptr;
4805 bool copyin_by_ref = false;
4806 bool lastprivate_firstprivate = false;
4807 bool reduction_omp_orig_ref = false;
4808 int pass;
4809 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4810 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4811 omplow_simd_context sctx = omplow_simd_context ();
4812 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4813 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4814 gimple_seq llist[4] = { };
4815 tree nonconst_simd_if = NULL_TREE;
4817 copyin_seq = NULL;
4818 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4820 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4821 with data sharing clauses referencing variable sized vars. That
4822 is unnecessarily hard to support and very unlikely to result in
4823 vectorized code anyway. */
4824 if (is_simd)
4825 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4826 switch (OMP_CLAUSE_CODE (c))
4828 case OMP_CLAUSE_LINEAR:
4829 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4830 sctx.max_vf = 1;
4831 /* FALLTHRU */
4832 case OMP_CLAUSE_PRIVATE:
4833 case OMP_CLAUSE_FIRSTPRIVATE:
4834 case OMP_CLAUSE_LASTPRIVATE:
4835 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4836 sctx.max_vf = 1;
4837 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4839 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4840 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4841 sctx.max_vf = 1;
4843 break;
4844 case OMP_CLAUSE_REDUCTION:
4845 case OMP_CLAUSE_IN_REDUCTION:
4846 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4847 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4848 sctx.max_vf = 1;
4849 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4851 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4852 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4853 sctx.max_vf = 1;
4855 break;
4856 case OMP_CLAUSE_IF:
4857 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4858 sctx.max_vf = 1;
4859 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4860 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4861 break;
4862 case OMP_CLAUSE_SIMDLEN:
4863 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4864 sctx.max_vf = 1;
4865 break;
4866 case OMP_CLAUSE__CONDTEMP_:
4867 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4868 if (sctx.is_simt)
4869 sctx.max_vf = 1;
4870 break;
4871 default:
4872 continue;
4875 /* Add a placeholder for simduid. */
4876 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4877 sctx.simt_eargs.safe_push (NULL_TREE);
4879 unsigned task_reduction_cnt = 0;
4880 unsigned task_reduction_cntorig = 0;
4881 unsigned task_reduction_cnt_full = 0;
4882 unsigned task_reduction_cntorig_full = 0;
4883 unsigned task_reduction_other_cnt = 0;
4884 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4885 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4886 /* Do all the fixed sized types in the first pass, and the variable sized
4887 types in the second pass. This makes sure that the scalar arguments to
4888 the variable sized types are processed before we use them in the
4889 variable sized operations. For task reductions we use 4 passes, in the
4890 first two we ignore them, in the third one gather arguments for
4891 GOMP_task_reduction_remap call and in the last pass actually handle
4892 the task reductions. */
4893 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4894 ? 4 : 2); ++pass)
4896 if (pass == 2 && task_reduction_cnt)
4898 tskred_atype
4899 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4900 + task_reduction_cntorig);
4901 tskred_avar = create_tmp_var_raw (tskred_atype);
4902 gimple_add_tmp_var (tskred_avar);
4903 TREE_ADDRESSABLE (tskred_avar) = 1;
4904 task_reduction_cnt_full = task_reduction_cnt;
4905 task_reduction_cntorig_full = task_reduction_cntorig;
4907 else if (pass == 3 && task_reduction_cnt)
4909 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4910 gimple *g
4911 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4912 size_int (task_reduction_cntorig),
4913 build_fold_addr_expr (tskred_avar));
4914 gimple_seq_add_stmt (ilist, g);
4916 if (pass == 3 && task_reduction_other_cnt)
4918 /* For reduction clauses, build
4919 tskred_base = (void *) tskred_temp[2]
4920 + omp_get_thread_num () * tskred_temp[1]
4921 or if tskred_temp[1] is known to be constant, that constant
4922 directly. This is the start of the private reduction copy block
4923 for the current thread. */
4924 tree v = create_tmp_var (integer_type_node);
4925 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4926 gimple *g = gimple_build_call (x, 0);
4927 gimple_call_set_lhs (g, v);
4928 gimple_seq_add_stmt (ilist, g);
4929 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4930 tskred_temp = OMP_CLAUSE_DECL (c);
4931 if (is_taskreg_ctx (ctx))
4932 tskred_temp = lookup_decl (tskred_temp, ctx);
4933 tree v2 = create_tmp_var (sizetype);
4934 g = gimple_build_assign (v2, NOP_EXPR, v);
4935 gimple_seq_add_stmt (ilist, g);
4936 if (ctx->task_reductions[0])
4937 v = fold_convert (sizetype, ctx->task_reductions[0]);
4938 else
4939 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4940 tree v3 = create_tmp_var (sizetype);
4941 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4942 gimple_seq_add_stmt (ilist, g);
4943 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4944 tskred_base = create_tmp_var (ptr_type_node);
4945 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4946 gimple_seq_add_stmt (ilist, g);
4948 task_reduction_cnt = 0;
4949 task_reduction_cntorig = 0;
4950 task_reduction_other_cnt = 0;
4951 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4953 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4954 tree var, new_var;
4955 bool by_ref;
4956 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4957 bool task_reduction_p = false;
4958 bool task_reduction_needs_orig_p = false;
4959 tree cond = NULL_TREE;
4960 tree allocator, allocate_ptr;
4962 switch (c_kind)
4964 case OMP_CLAUSE_PRIVATE:
4965 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4966 continue;
4967 break;
4968 case OMP_CLAUSE_SHARED:
4969 /* Ignore shared directives in teams construct inside
4970 of target construct. */
4971 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4972 && !is_host_teams_ctx (ctx))
4973 continue;
4974 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4976 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4977 || is_global_var (OMP_CLAUSE_DECL (c)));
4978 continue;
4980 case OMP_CLAUSE_FIRSTPRIVATE:
4981 case OMP_CLAUSE_COPYIN:
4982 break;
4983 case OMP_CLAUSE_LINEAR:
4984 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4985 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4986 lastprivate_firstprivate = true;
4987 break;
4988 case OMP_CLAUSE_REDUCTION:
4989 case OMP_CLAUSE_IN_REDUCTION:
4990 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
4991 || is_task_ctx (ctx)
4992 || OMP_CLAUSE_REDUCTION_TASK (c))
4994 task_reduction_p = true;
4995 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4997 task_reduction_other_cnt++;
4998 if (pass == 2)
4999 continue;
5001 else
5002 task_reduction_cnt++;
5003 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5005 var = OMP_CLAUSE_DECL (c);
5006 /* If var is a global variable that isn't privatized
5007 in outer contexts, we don't need to look up the
5008 original address, it is always the address of the
5009 global variable itself. */
5010 if (!DECL_P (var)
5011 || omp_privatize_by_reference (var)
5012 || !is_global_var
5013 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5015 task_reduction_needs_orig_p = true;
5016 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5017 task_reduction_cntorig++;
5021 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5022 reduction_omp_orig_ref = true;
5023 break;
5024 case OMP_CLAUSE__REDUCTEMP_:
5025 if (!is_taskreg_ctx (ctx))
5026 continue;
5027 /* FALLTHRU */
5028 case OMP_CLAUSE__LOOPTEMP_:
5029 /* Handle _looptemp_/_reductemp_ clauses only on
5030 parallel/task. */
5031 if (fd)
5032 continue;
5033 break;
5034 case OMP_CLAUSE_LASTPRIVATE:
5035 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5037 lastprivate_firstprivate = true;
5038 if (pass != 0 || is_taskloop_ctx (ctx))
5039 continue;
5041 /* Even without corresponding firstprivate, if
5042 decl is Fortran allocatable, it needs outer var
5043 reference. */
5044 else if (pass == 0
5045 && lang_hooks.decls.omp_private_outer_ref
5046 (OMP_CLAUSE_DECL (c)))
5047 lastprivate_firstprivate = true;
5048 break;
5049 case OMP_CLAUSE_ALIGNED:
5050 if (pass != 1)
5051 continue;
5052 var = OMP_CLAUSE_DECL (c);
5053 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5054 && !is_global_var (var))
5056 new_var = maybe_lookup_decl (var, ctx);
5057 if (new_var == NULL_TREE)
5058 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
5059 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5060 tree alarg = omp_clause_aligned_alignment (c);
5061 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5062 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5063 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5064 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5065 gimplify_and_add (x, ilist);
5067 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5068 && is_global_var (var))
5070 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5071 new_var = lookup_decl (var, ctx);
5072 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
5073 t = build_fold_addr_expr_loc (clause_loc, t);
5074 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5075 tree alarg = omp_clause_aligned_alignment (c);
5076 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5077 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5078 t = fold_convert_loc (clause_loc, ptype, t);
5079 x = create_tmp_var (ptype);
5080 t = build2 (MODIFY_EXPR, ptype, x, t);
5081 gimplify_and_add (t, ilist);
5082 t = build_simple_mem_ref_loc (clause_loc, x);
5083 SET_DECL_VALUE_EXPR (new_var, t);
5084 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5086 continue;
5087 case OMP_CLAUSE__CONDTEMP_:
5088 if (is_parallel_ctx (ctx)
5089 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5090 break;
5091 continue;
5092 default:
5093 continue;
5096 if (task_reduction_p != (pass >= 2))
5097 continue;
5099 allocator = NULL_TREE;
5100 allocate_ptr = NULL_TREE;
5101 new_var = var = OMP_CLAUSE_DECL (c);
5102 if ((c_kind == OMP_CLAUSE_REDUCTION
5103 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5104 && TREE_CODE (var) == MEM_REF)
5106 var = TREE_OPERAND (var, 0);
5107 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5108 var = TREE_OPERAND (var, 0);
5109 if (TREE_CODE (var) == INDIRECT_REF
5110 || TREE_CODE (var) == ADDR_EXPR)
5111 var = TREE_OPERAND (var, 0);
5112 if (is_variable_sized (var))
5114 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5115 var = DECL_VALUE_EXPR (var);
5116 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5117 var = TREE_OPERAND (var, 0);
5118 gcc_assert (DECL_P (var));
5120 new_var = var;
5122 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5124 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5125 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5127 else if (c_kind != OMP_CLAUSE_COPYIN)
5128 new_var = lookup_decl (var, ctx);
5130 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5132 if (pass != 0)
5133 continue;
5135 /* C/C++ array section reductions. */
5136 else if ((c_kind == OMP_CLAUSE_REDUCTION
5137 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5138 && var != OMP_CLAUSE_DECL (c))
5140 if (pass == 0)
5141 continue;
5143 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5144 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5146 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5148 tree b = TREE_OPERAND (orig_var, 1);
5149 if (is_omp_target (ctx->stmt))
5150 b = NULL_TREE;
5151 else
5152 b = maybe_lookup_decl (b, ctx);
5153 if (b == NULL)
5155 b = TREE_OPERAND (orig_var, 1);
5156 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5158 if (integer_zerop (bias))
5159 bias = b;
5160 else
5162 bias = fold_convert_loc (clause_loc,
5163 TREE_TYPE (b), bias);
5164 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5165 TREE_TYPE (b), b, bias);
5167 orig_var = TREE_OPERAND (orig_var, 0);
5169 if (pass == 2)
5171 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5172 if (is_global_var (out)
5173 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5174 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5175 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5176 != POINTER_TYPE)))
5177 x = var;
5178 else if (is_omp_target (ctx->stmt))
5179 x = out;
5180 else
5182 bool by_ref = use_pointer_for_field (var, NULL);
5183 x = build_receiver_ref (var, by_ref, ctx);
5184 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5185 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5186 == POINTER_TYPE))
5187 x = build_fold_addr_expr (x);
5189 if (TREE_CODE (orig_var) == INDIRECT_REF)
5190 x = build_simple_mem_ref (x);
5191 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5193 if (var == TREE_OPERAND (orig_var, 0))
5194 x = build_fold_addr_expr (x);
5196 bias = fold_convert (sizetype, bias);
5197 x = fold_convert (ptr_type_node, x);
5198 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5199 TREE_TYPE (x), x, bias);
5200 unsigned cnt = task_reduction_cnt - 1;
5201 if (!task_reduction_needs_orig_p)
5202 cnt += (task_reduction_cntorig_full
5203 - task_reduction_cntorig);
5204 else
5205 cnt = task_reduction_cntorig - 1;
5206 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5207 size_int (cnt), NULL_TREE, NULL_TREE);
5208 gimplify_assign (r, x, ilist);
5209 continue;
5212 if (TREE_CODE (orig_var) == INDIRECT_REF
5213 || TREE_CODE (orig_var) == ADDR_EXPR)
5214 orig_var = TREE_OPERAND (orig_var, 0);
5215 tree d = OMP_CLAUSE_DECL (c);
5216 tree type = TREE_TYPE (d);
5217 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5218 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5219 tree sz = v;
5220 const char *name = get_name (orig_var);
5221 if (pass != 3 && !TREE_CONSTANT (v))
5223 tree t;
5224 if (is_omp_target (ctx->stmt))
5225 t = NULL_TREE;
5226 else
5227 t = maybe_lookup_decl (v, ctx);
5228 if (t)
5229 v = t;
5230 else
5231 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5232 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5233 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5234 TREE_TYPE (v), v,
5235 build_int_cst (TREE_TYPE (v), 1));
5236 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5237 TREE_TYPE (v), t,
5238 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5240 if (pass == 3)
5242 tree xv = create_tmp_var (ptr_type_node);
5243 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5245 unsigned cnt = task_reduction_cnt - 1;
5246 if (!task_reduction_needs_orig_p)
5247 cnt += (task_reduction_cntorig_full
5248 - task_reduction_cntorig);
5249 else
5250 cnt = task_reduction_cntorig - 1;
5251 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5252 size_int (cnt), NULL_TREE, NULL_TREE);
5254 gimple *g = gimple_build_assign (xv, x);
5255 gimple_seq_add_stmt (ilist, g);
5257 else
5259 unsigned int idx = *ctx->task_reduction_map->get (c);
5260 tree off;
5261 if (ctx->task_reductions[1 + idx])
5262 off = fold_convert (sizetype,
5263 ctx->task_reductions[1 + idx]);
5264 else
5265 off = task_reduction_read (ilist, tskred_temp, sizetype,
5266 7 + 3 * idx + 1);
5267 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5268 tskred_base, off);
5269 gimple_seq_add_stmt (ilist, g);
5271 x = fold_convert (build_pointer_type (boolean_type_node),
5272 xv);
5273 if (TREE_CONSTANT (v))
5274 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5275 TYPE_SIZE_UNIT (type));
5276 else
5278 tree t;
5279 if (is_omp_target (ctx->stmt))
5280 t = NULL_TREE;
5281 else
5282 t = maybe_lookup_decl (v, ctx);
5283 if (t)
5284 v = t;
5285 else
5286 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5287 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5288 fb_rvalue);
5289 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5290 TREE_TYPE (v), v,
5291 build_int_cst (TREE_TYPE (v), 1));
5292 t = fold_build2_loc (clause_loc, MULT_EXPR,
5293 TREE_TYPE (v), t,
5294 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5295 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5297 cond = create_tmp_var (TREE_TYPE (x));
5298 gimplify_assign (cond, x, ilist);
5299 x = xv;
5301 else if (lower_private_allocate (var, type, allocator,
5302 allocate_ptr, ilist, ctx,
5303 true,
5304 TREE_CONSTANT (v)
5305 ? TYPE_SIZE_UNIT (type)
5306 : sz))
5307 x = allocate_ptr;
5308 else if (TREE_CONSTANT (v))
5310 x = create_tmp_var_raw (type, name);
5311 gimple_add_tmp_var (x);
5312 TREE_ADDRESSABLE (x) = 1;
5313 x = build_fold_addr_expr_loc (clause_loc, x);
5315 else
5317 tree atmp
5318 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5319 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5320 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5323 tree ptype = build_pointer_type (TREE_TYPE (type));
5324 x = fold_convert_loc (clause_loc, ptype, x);
5325 tree y = create_tmp_var (ptype, name);
5326 gimplify_assign (y, x, ilist);
5327 x = y;
5328 tree yb = y;
5330 if (!integer_zerop (bias))
5332 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5333 bias);
5334 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5336 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5337 pointer_sized_int_node, yb, bias);
5338 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5339 yb = create_tmp_var (ptype, name);
5340 gimplify_assign (yb, x, ilist);
5341 x = yb;
5344 d = TREE_OPERAND (d, 0);
5345 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5346 d = TREE_OPERAND (d, 0);
5347 if (TREE_CODE (d) == ADDR_EXPR)
5349 if (orig_var != var)
5351 gcc_assert (is_variable_sized (orig_var));
5352 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5354 gimplify_assign (new_var, x, ilist);
5355 tree new_orig_var = lookup_decl (orig_var, ctx);
5356 tree t = build_fold_indirect_ref (new_var);
5357 DECL_IGNORED_P (new_var) = 0;
5358 TREE_THIS_NOTRAP (t) = 1;
5359 SET_DECL_VALUE_EXPR (new_orig_var, t);
5360 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5362 else
5364 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5365 build_int_cst (ptype, 0));
5366 SET_DECL_VALUE_EXPR (new_var, x);
5367 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5370 else
5372 gcc_assert (orig_var == var);
5373 if (TREE_CODE (d) == INDIRECT_REF)
5375 x = create_tmp_var (ptype, name);
5376 TREE_ADDRESSABLE (x) = 1;
5377 gimplify_assign (x, yb, ilist);
5378 x = build_fold_addr_expr_loc (clause_loc, x);
5380 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5381 gimplify_assign (new_var, x, ilist);
5383 /* GOMP_taskgroup_reduction_register memsets the whole
5384 array to zero. If the initializer is zero, we don't
5385 need to initialize it again, just mark it as ever
5386 used unconditionally, i.e. cond = true. */
5387 if (cond
5388 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5389 && initializer_zerop (omp_reduction_init (c,
5390 TREE_TYPE (type))))
5392 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5393 boolean_true_node);
5394 gimple_seq_add_stmt (ilist, g);
5395 continue;
5397 tree end = create_artificial_label (UNKNOWN_LOCATION);
5398 if (cond)
5400 gimple *g;
5401 if (!is_parallel_ctx (ctx))
5403 tree condv = create_tmp_var (boolean_type_node);
5404 g = gimple_build_assign (condv,
5405 build_simple_mem_ref (cond));
5406 gimple_seq_add_stmt (ilist, g);
5407 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5408 g = gimple_build_cond (NE_EXPR, condv,
5409 boolean_false_node, end, lab1);
5410 gimple_seq_add_stmt (ilist, g);
5411 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5413 g = gimple_build_assign (build_simple_mem_ref (cond),
5414 boolean_true_node);
5415 gimple_seq_add_stmt (ilist, g);
5418 tree y1 = create_tmp_var (ptype);
5419 gimplify_assign (y1, y, ilist);
5420 tree i2 = NULL_TREE, y2 = NULL_TREE;
5421 tree body2 = NULL_TREE, end2 = NULL_TREE;
5422 tree y3 = NULL_TREE, y4 = NULL_TREE;
5423 if (task_reduction_needs_orig_p)
5425 y3 = create_tmp_var (ptype);
5426 tree ref;
5427 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5428 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5429 size_int (task_reduction_cnt_full
5430 + task_reduction_cntorig - 1),
5431 NULL_TREE, NULL_TREE);
5432 else
5434 unsigned int idx = *ctx->task_reduction_map->get (c);
5435 ref = task_reduction_read (ilist, tskred_temp, ptype,
5436 7 + 3 * idx);
5438 gimplify_assign (y3, ref, ilist);
5440 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5442 if (pass != 3)
5444 y2 = create_tmp_var (ptype);
5445 gimplify_assign (y2, y, ilist);
5447 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5449 tree ref = build_outer_var_ref (var, ctx);
5450 /* For ref build_outer_var_ref already performs this. */
5451 if (TREE_CODE (d) == INDIRECT_REF)
5452 gcc_assert (omp_privatize_by_reference (var));
5453 else if (TREE_CODE (d) == ADDR_EXPR)
5454 ref = build_fold_addr_expr (ref);
5455 else if (omp_privatize_by_reference (var))
5456 ref = build_fold_addr_expr (ref);
5457 ref = fold_convert_loc (clause_loc, ptype, ref);
5458 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5459 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5461 y3 = create_tmp_var (ptype);
5462 gimplify_assign (y3, unshare_expr (ref), ilist);
5464 if (is_simd)
5466 y4 = create_tmp_var (ptype);
5467 gimplify_assign (y4, ref, dlist);
5471 tree i = create_tmp_var (TREE_TYPE (v));
5472 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5473 tree body = create_artificial_label (UNKNOWN_LOCATION);
5474 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5475 if (y2)
5477 i2 = create_tmp_var (TREE_TYPE (v));
5478 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5479 body2 = create_artificial_label (UNKNOWN_LOCATION);
5480 end2 = create_artificial_label (UNKNOWN_LOCATION);
5481 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5483 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5485 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5486 tree decl_placeholder
5487 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5488 SET_DECL_VALUE_EXPR (decl_placeholder,
5489 build_simple_mem_ref (y1));
5490 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5491 SET_DECL_VALUE_EXPR (placeholder,
5492 y3 ? build_simple_mem_ref (y3)
5493 : error_mark_node);
5494 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5495 x = lang_hooks.decls.omp_clause_default_ctor
5496 (c, build_simple_mem_ref (y1),
5497 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5498 if (x)
5499 gimplify_and_add (x, ilist);
5500 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5502 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5503 lower_omp (&tseq, ctx);
5504 gimple_seq_add_seq (ilist, tseq);
5506 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5507 if (is_simd)
5509 SET_DECL_VALUE_EXPR (decl_placeholder,
5510 build_simple_mem_ref (y2));
5511 SET_DECL_VALUE_EXPR (placeholder,
5512 build_simple_mem_ref (y4));
5513 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5514 lower_omp (&tseq, ctx);
5515 gimple_seq_add_seq (dlist, tseq);
5516 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5518 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5519 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5520 if (y2)
5522 x = lang_hooks.decls.omp_clause_dtor
5523 (c, build_simple_mem_ref (y2));
5524 if (x)
5525 gimplify_and_add (x, dlist);
5528 else
5530 x = omp_reduction_init (c, TREE_TYPE (type));
5531 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5533 /* reduction(-:var) sums up the partial results, so it
5534 acts identically to reduction(+:var). */
5535 if (code == MINUS_EXPR)
5536 code = PLUS_EXPR;
5538 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5539 if (is_simd)
5541 x = build2 (code, TREE_TYPE (type),
5542 build_simple_mem_ref (y4),
5543 build_simple_mem_ref (y2));
5544 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5547 gimple *g
5548 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5549 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5550 gimple_seq_add_stmt (ilist, g);
5551 if (y3)
5553 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5554 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5555 gimple_seq_add_stmt (ilist, g);
5557 g = gimple_build_assign (i, PLUS_EXPR, i,
5558 build_int_cst (TREE_TYPE (i), 1));
5559 gimple_seq_add_stmt (ilist, g);
5560 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5561 gimple_seq_add_stmt (ilist, g);
5562 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5563 if (y2)
5565 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5566 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5567 gimple_seq_add_stmt (dlist, g);
5568 if (y4)
5570 g = gimple_build_assign
5571 (y4, POINTER_PLUS_EXPR, y4,
5572 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5573 gimple_seq_add_stmt (dlist, g);
5575 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5576 build_int_cst (TREE_TYPE (i2), 1));
5577 gimple_seq_add_stmt (dlist, g);
5578 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5579 gimple_seq_add_stmt (dlist, g);
5580 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5582 if (allocator)
5584 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5585 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5586 gimple_seq_add_stmt (dlist, g);
5588 continue;
5590 else if (pass == 2)
5592 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5593 if (is_global_var (out))
5594 x = var;
5595 else if (is_omp_target (ctx->stmt))
5596 x = out;
5597 else
5599 bool by_ref = use_pointer_for_field (var, ctx);
5600 x = build_receiver_ref (var, by_ref, ctx);
5602 if (!omp_privatize_by_reference (var))
5603 x = build_fold_addr_expr (x);
5604 x = fold_convert (ptr_type_node, x);
5605 unsigned cnt = task_reduction_cnt - 1;
5606 if (!task_reduction_needs_orig_p)
5607 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5608 else
5609 cnt = task_reduction_cntorig - 1;
5610 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5611 size_int (cnt), NULL_TREE, NULL_TREE);
5612 gimplify_assign (r, x, ilist);
5613 continue;
5615 else if (pass == 3)
5617 tree type = TREE_TYPE (new_var);
5618 if (!omp_privatize_by_reference (var))
5619 type = build_pointer_type (type);
5620 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5622 unsigned cnt = task_reduction_cnt - 1;
5623 if (!task_reduction_needs_orig_p)
5624 cnt += (task_reduction_cntorig_full
5625 - task_reduction_cntorig);
5626 else
5627 cnt = task_reduction_cntorig - 1;
5628 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5629 size_int (cnt), NULL_TREE, NULL_TREE);
5631 else
5633 unsigned int idx = *ctx->task_reduction_map->get (c);
5634 tree off;
5635 if (ctx->task_reductions[1 + idx])
5636 off = fold_convert (sizetype,
5637 ctx->task_reductions[1 + idx]);
5638 else
5639 off = task_reduction_read (ilist, tskred_temp, sizetype,
5640 7 + 3 * idx + 1);
5641 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5642 tskred_base, off);
5644 x = fold_convert (type, x);
5645 tree t;
5646 if (omp_privatize_by_reference (var))
5648 gimplify_assign (new_var, x, ilist);
5649 t = new_var;
5650 new_var = build_simple_mem_ref (new_var);
5652 else
5654 t = create_tmp_var (type);
5655 gimplify_assign (t, x, ilist);
5656 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5657 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5659 t = fold_convert (build_pointer_type (boolean_type_node), t);
5660 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5661 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5662 cond = create_tmp_var (TREE_TYPE (t));
5663 gimplify_assign (cond, t, ilist);
5665 else if (is_variable_sized (var))
5667 /* For variable sized types, we need to allocate the
5668 actual storage here. Call alloca and store the
5669 result in the pointer decl that we created elsewhere. */
5670 if (pass == 0)
5671 continue;
5673 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5675 tree tmp;
5677 ptr = DECL_VALUE_EXPR (new_var);
5678 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5679 ptr = TREE_OPERAND (ptr, 0);
5680 gcc_assert (DECL_P (ptr));
5681 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5683 if (lower_private_allocate (var, new_var, allocator,
5684 allocate_ptr, ilist, ctx,
5685 false, x))
5686 tmp = allocate_ptr;
5687 else
5689 /* void *tmp = __builtin_alloca */
5690 tree atmp
5691 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5692 gcall *stmt
5693 = gimple_build_call (atmp, 2, x,
5694 size_int (DECL_ALIGN (var)));
5695 cfun->calls_alloca = 1;
5696 tmp = create_tmp_var_raw (ptr_type_node);
5697 gimple_add_tmp_var (tmp);
5698 gimple_call_set_lhs (stmt, tmp);
5700 gimple_seq_add_stmt (ilist, stmt);
5703 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5704 gimplify_assign (ptr, x, ilist);
5707 else if (omp_privatize_by_reference (var)
5708 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5709 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5711 /* For references that are being privatized for Fortran,
5712 allocate new backing storage for the new pointer
5713 variable. This allows us to avoid changing all the
5714 code that expects a pointer to something that expects
5715 a direct variable. */
5716 if (pass == 0)
5717 continue;
5719 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5720 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5722 x = build_receiver_ref (var, false, ctx);
5723 if (ctx->allocate_map)
5724 if (tree *allocatep = ctx->allocate_map->get (var))
5726 allocator = *allocatep;
5727 if (TREE_CODE (allocator) == TREE_LIST)
5728 allocator = TREE_PURPOSE (allocator);
5729 if (TREE_CODE (allocator) != INTEGER_CST)
5730 allocator = build_outer_var_ref (allocator, ctx);
5731 allocator = fold_convert (pointer_sized_int_node,
5732 allocator);
5733 allocate_ptr = unshare_expr (x);
5735 if (allocator == NULL_TREE)
5736 x = build_fold_addr_expr_loc (clause_loc, x);
5738 else if (lower_private_allocate (var, new_var, allocator,
5739 allocate_ptr,
5740 ilist, ctx, true, x))
5741 x = allocate_ptr;
5742 else if (TREE_CONSTANT (x))
5744 /* For reduction in SIMD loop, defer adding the
5745 initialization of the reference, because if we decide
5746 to use SIMD array for it, the initilization could cause
5747 expansion ICE. Ditto for other privatization clauses. */
5748 if (is_simd)
5749 x = NULL_TREE;
5750 else
5752 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5753 get_name (var));
5754 gimple_add_tmp_var (x);
5755 TREE_ADDRESSABLE (x) = 1;
5756 x = build_fold_addr_expr_loc (clause_loc, x);
5759 else
5761 tree atmp
5762 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5763 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5764 tree al = size_int (TYPE_ALIGN (rtype));
5765 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5768 if (x)
5770 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5771 gimplify_assign (new_var, x, ilist);
5774 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5776 else if ((c_kind == OMP_CLAUSE_REDUCTION
5777 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5778 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5780 if (pass == 0)
5781 continue;
5783 else if (pass != 0)
5784 continue;
5786 switch (OMP_CLAUSE_CODE (c))
5788 case OMP_CLAUSE_SHARED:
5789 /* Ignore shared directives in teams construct inside
5790 target construct. */
5791 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5792 && !is_host_teams_ctx (ctx))
5793 continue;
5794 /* Shared global vars are just accessed directly. */
5795 if (is_global_var (new_var))
5796 break;
5797 /* For taskloop firstprivate/lastprivate, represented
5798 as firstprivate and shared clause on the task, new_var
5799 is the firstprivate var. */
5800 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5801 break;
5802 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5803 needs to be delayed until after fixup_child_record_type so
5804 that we get the correct type during the dereference. */
5805 by_ref = use_pointer_for_field (var, ctx);
5806 x = build_receiver_ref (var, by_ref, ctx);
5807 SET_DECL_VALUE_EXPR (new_var, x);
5808 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5810 /* ??? If VAR is not passed by reference, and the variable
5811 hasn't been initialized yet, then we'll get a warning for
5812 the store into the omp_data_s structure. Ideally, we'd be
5813 able to notice this and not store anything at all, but
5814 we're generating code too early. Suppress the warning. */
5815 if (!by_ref)
5816 suppress_warning (var, OPT_Wuninitialized);
5817 break;
5819 case OMP_CLAUSE__CONDTEMP_:
5820 if (is_parallel_ctx (ctx))
5822 x = build_receiver_ref (var, false, ctx);
5823 SET_DECL_VALUE_EXPR (new_var, x);
5824 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5826 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5828 x = build_zero_cst (TREE_TYPE (var));
5829 goto do_private;
5831 break;
5833 case OMP_CLAUSE_LASTPRIVATE:
5834 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5835 break;
5836 /* FALLTHRU */
5838 case OMP_CLAUSE_PRIVATE:
5839 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5840 x = build_outer_var_ref (var, ctx);
5841 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5843 if (is_task_ctx (ctx))
5844 x = build_receiver_ref (var, false, ctx);
5845 else
5846 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5848 else
5849 x = NULL;
5850 do_private:
5851 tree nx;
5852 bool copy_ctor;
5853 copy_ctor = false;
5854 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5855 ilist, ctx, false, NULL_TREE);
5856 nx = unshare_expr (new_var);
5857 if (is_simd
5858 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5859 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5860 copy_ctor = true;
5861 if (copy_ctor)
5862 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5863 else
5864 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5865 if (is_simd)
5867 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5868 if ((TREE_ADDRESSABLE (new_var) || nx || y
5869 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5870 && (gimple_omp_for_collapse (ctx->stmt) != 1
5871 || (gimple_omp_for_index (ctx->stmt, 0)
5872 != new_var)))
5873 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5874 || omp_privatize_by_reference (var))
5875 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5876 ivar, lvar))
5878 if (omp_privatize_by_reference (var))
5880 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5881 tree new_vard = TREE_OPERAND (new_var, 0);
5882 gcc_assert (DECL_P (new_vard));
5883 SET_DECL_VALUE_EXPR (new_vard,
5884 build_fold_addr_expr (lvar));
5885 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5888 if (nx)
5890 tree iv = unshare_expr (ivar);
5891 if (copy_ctor)
5892 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5894 else
5895 x = lang_hooks.decls.omp_clause_default_ctor (c,
5899 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5901 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5902 unshare_expr (ivar), x);
5903 nx = x;
5905 if (nx && x)
5906 gimplify_and_add (x, &llist[0]);
5907 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5908 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5910 tree v = new_var;
5911 if (!DECL_P (v))
5913 gcc_assert (TREE_CODE (v) == MEM_REF);
5914 v = TREE_OPERAND (v, 0);
5915 gcc_assert (DECL_P (v));
5917 v = *ctx->lastprivate_conditional_map->get (v);
5918 tree t = create_tmp_var (TREE_TYPE (v));
5919 tree z = build_zero_cst (TREE_TYPE (v));
5920 tree orig_v
5921 = build_outer_var_ref (var, ctx,
5922 OMP_CLAUSE_LASTPRIVATE);
5923 gimple_seq_add_stmt (dlist,
5924 gimple_build_assign (t, z));
5925 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5926 tree civar = DECL_VALUE_EXPR (v);
5927 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5928 civar = unshare_expr (civar);
5929 TREE_OPERAND (civar, 1) = sctx.idx;
5930 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5931 unshare_expr (civar));
5932 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5933 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5934 orig_v, unshare_expr (ivar)));
5935 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5936 civar);
5937 x = build3 (COND_EXPR, void_type_node, cond, x,
5938 void_node);
5939 gimple_seq tseq = NULL;
5940 gimplify_and_add (x, &tseq);
5941 if (ctx->outer)
5942 lower_omp (&tseq, ctx->outer);
5943 gimple_seq_add_seq (&llist[1], tseq);
5945 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5946 && ctx->for_simd_scan_phase)
5948 x = unshare_expr (ivar);
5949 tree orig_v
5950 = build_outer_var_ref (var, ctx,
5951 OMP_CLAUSE_LASTPRIVATE);
5952 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5953 orig_v);
5954 gimplify_and_add (x, &llist[0]);
5956 if (y)
5958 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5959 if (y)
5960 gimplify_and_add (y, &llist[1]);
5962 break;
5964 if (omp_privatize_by_reference (var))
5966 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5967 tree new_vard = TREE_OPERAND (new_var, 0);
5968 gcc_assert (DECL_P (new_vard));
5969 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5970 x = TYPE_SIZE_UNIT (type);
5971 if (TREE_CONSTANT (x))
5973 x = create_tmp_var_raw (type, get_name (var));
5974 gimple_add_tmp_var (x);
5975 TREE_ADDRESSABLE (x) = 1;
5976 x = build_fold_addr_expr_loc (clause_loc, x);
5977 x = fold_convert_loc (clause_loc,
5978 TREE_TYPE (new_vard), x);
5979 gimplify_assign (new_vard, x, ilist);
5983 if (nx)
5984 gimplify_and_add (nx, ilist);
5985 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5986 && is_simd
5987 && ctx->for_simd_scan_phase)
5989 tree orig_v = build_outer_var_ref (var, ctx,
5990 OMP_CLAUSE_LASTPRIVATE);
5991 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5992 orig_v);
5993 gimplify_and_add (x, ilist);
5995 /* FALLTHRU */
5997 do_dtor:
5998 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5999 if (x)
6000 gimplify_and_add (x, dlist);
6001 if (allocator)
6003 if (!is_gimple_val (allocator))
6005 tree avar = create_tmp_var (TREE_TYPE (allocator));
6006 gimplify_assign (avar, allocator, dlist);
6007 allocator = avar;
6009 if (!is_gimple_val (allocate_ptr))
6011 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
6012 gimplify_assign (apvar, allocate_ptr, dlist);
6013 allocate_ptr = apvar;
6015 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
6016 gimple *g
6017 = gimple_build_call (f, 2, allocate_ptr, allocator);
6018 gimple_seq_add_stmt (dlist, g);
6020 break;
6022 case OMP_CLAUSE_LINEAR:
6023 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6024 goto do_firstprivate;
6025 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6026 x = NULL;
6027 else
6028 x = build_outer_var_ref (var, ctx);
6029 goto do_private;
6031 case OMP_CLAUSE_FIRSTPRIVATE:
6032 if (is_task_ctx (ctx))
6034 if ((omp_privatize_by_reference (var)
6035 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
6036 || is_variable_sized (var))
6037 goto do_dtor;
6038 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
6039 ctx))
6040 || use_pointer_for_field (var, NULL))
6042 x = build_receiver_ref (var, false, ctx);
6043 if (ctx->allocate_map)
6044 if (tree *allocatep = ctx->allocate_map->get (var))
6046 allocator = *allocatep;
6047 if (TREE_CODE (allocator) == TREE_LIST)
6048 allocator = TREE_PURPOSE (allocator);
6049 if (TREE_CODE (allocator) != INTEGER_CST)
6050 allocator = build_outer_var_ref (allocator, ctx);
6051 allocator = fold_convert (pointer_sized_int_node,
6052 allocator);
6053 allocate_ptr = unshare_expr (x);
6054 x = build_simple_mem_ref (x);
6055 TREE_THIS_NOTRAP (x) = 1;
6057 SET_DECL_VALUE_EXPR (new_var, x);
6058 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6059 goto do_dtor;
6062 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6063 && omp_privatize_by_reference (var))
6065 x = build_outer_var_ref (var, ctx);
6066 gcc_assert (TREE_CODE (x) == MEM_REF
6067 && integer_zerop (TREE_OPERAND (x, 1)));
6068 x = TREE_OPERAND (x, 0);
6069 x = lang_hooks.decls.omp_clause_copy_ctor
6070 (c, unshare_expr (new_var), x);
6071 gimplify_and_add (x, ilist);
6072 goto do_dtor;
6074 do_firstprivate:
6075 lower_private_allocate (var, new_var, allocator, allocate_ptr,
6076 ilist, ctx, false, NULL_TREE);
6077 x = build_outer_var_ref (var, ctx);
6078 if (is_simd)
6080 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6081 && gimple_omp_for_combined_into_p (ctx->stmt))
6083 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6084 tree stept = TREE_TYPE (t);
6085 tree ct = omp_find_clause (clauses,
6086 OMP_CLAUSE__LOOPTEMP_);
6087 gcc_assert (ct);
6088 tree l = OMP_CLAUSE_DECL (ct);
6089 tree n1 = fd->loop.n1;
6090 tree step = fd->loop.step;
6091 tree itype = TREE_TYPE (l);
6092 if (POINTER_TYPE_P (itype))
6093 itype = signed_type_for (itype);
6094 l = fold_build2 (MINUS_EXPR, itype, l, n1);
6095 if (TYPE_UNSIGNED (itype)
6096 && fd->loop.cond_code == GT_EXPR)
6097 l = fold_build2 (TRUNC_DIV_EXPR, itype,
6098 fold_build1 (NEGATE_EXPR, itype, l),
6099 fold_build1 (NEGATE_EXPR,
6100 itype, step));
6101 else
6102 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6103 t = fold_build2 (MULT_EXPR, stept,
6104 fold_convert (stept, l), t);
6106 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6108 if (omp_privatize_by_reference (var))
6110 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6111 tree new_vard = TREE_OPERAND (new_var, 0);
6112 gcc_assert (DECL_P (new_vard));
6113 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6114 nx = TYPE_SIZE_UNIT (type);
6115 if (TREE_CONSTANT (nx))
6117 nx = create_tmp_var_raw (type,
6118 get_name (var));
6119 gimple_add_tmp_var (nx);
6120 TREE_ADDRESSABLE (nx) = 1;
6121 nx = build_fold_addr_expr_loc (clause_loc,
6122 nx);
6123 nx = fold_convert_loc (clause_loc,
6124 TREE_TYPE (new_vard),
6125 nx);
6126 gimplify_assign (new_vard, nx, ilist);
6130 x = lang_hooks.decls.omp_clause_linear_ctor
6131 (c, new_var, x, t);
6132 gimplify_and_add (x, ilist);
6133 goto do_dtor;
6136 if (POINTER_TYPE_P (TREE_TYPE (x)))
6137 x = fold_build2 (POINTER_PLUS_EXPR,
6138 TREE_TYPE (x), x, t);
6139 else
6140 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
6143 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6144 || TREE_ADDRESSABLE (new_var)
6145 || omp_privatize_by_reference (var))
6146 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6147 ivar, lvar))
6149 if (omp_privatize_by_reference (var))
6151 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6152 tree new_vard = TREE_OPERAND (new_var, 0);
6153 gcc_assert (DECL_P (new_vard));
6154 SET_DECL_VALUE_EXPR (new_vard,
6155 build_fold_addr_expr (lvar));
6156 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6158 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6160 tree iv = create_tmp_var (TREE_TYPE (new_var));
6161 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6162 gimplify_and_add (x, ilist);
6163 gimple_stmt_iterator gsi
6164 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6165 gassign *g
6166 = gimple_build_assign (unshare_expr (lvar), iv);
6167 gsi_insert_before_without_update (&gsi, g,
6168 GSI_SAME_STMT);
6169 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6170 enum tree_code code = PLUS_EXPR;
6171 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6172 code = POINTER_PLUS_EXPR;
6173 g = gimple_build_assign (iv, code, iv, t);
6174 gsi_insert_before_without_update (&gsi, g,
6175 GSI_SAME_STMT);
6176 break;
6178 x = lang_hooks.decls.omp_clause_copy_ctor
6179 (c, unshare_expr (ivar), x);
6180 gimplify_and_add (x, &llist[0]);
6181 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6182 if (x)
6183 gimplify_and_add (x, &llist[1]);
6184 break;
6186 if (omp_privatize_by_reference (var))
6188 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6189 tree new_vard = TREE_OPERAND (new_var, 0);
6190 gcc_assert (DECL_P (new_vard));
6191 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6192 nx = TYPE_SIZE_UNIT (type);
6193 if (TREE_CONSTANT (nx))
6195 nx = create_tmp_var_raw (type, get_name (var));
6196 gimple_add_tmp_var (nx);
6197 TREE_ADDRESSABLE (nx) = 1;
6198 nx = build_fold_addr_expr_loc (clause_loc, nx);
6199 nx = fold_convert_loc (clause_loc,
6200 TREE_TYPE (new_vard), nx);
6201 gimplify_assign (new_vard, nx, ilist);
6205 x = lang_hooks.decls.omp_clause_copy_ctor
6206 (c, unshare_expr (new_var), x);
6207 gimplify_and_add (x, ilist);
6208 goto do_dtor;
6210 case OMP_CLAUSE__LOOPTEMP_:
6211 case OMP_CLAUSE__REDUCTEMP_:
6212 gcc_assert (is_taskreg_ctx (ctx));
6213 x = build_outer_var_ref (var, ctx);
6214 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6215 gimplify_and_add (x, ilist);
6216 break;
6218 case OMP_CLAUSE_COPYIN:
6219 by_ref = use_pointer_for_field (var, NULL);
6220 x = build_receiver_ref (var, by_ref, ctx);
6221 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6222 append_to_statement_list (x, &copyin_seq);
6223 copyin_by_ref |= by_ref;
6224 break;
6226 case OMP_CLAUSE_REDUCTION:
6227 case OMP_CLAUSE_IN_REDUCTION:
6228 /* OpenACC reductions are initialized using the
6229 GOACC_REDUCTION internal function. */
6230 if (is_gimple_omp_oacc (ctx->stmt))
6231 break;
6232 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6234 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6235 gimple *tseq;
6236 tree ptype = TREE_TYPE (placeholder);
6237 if (cond)
6239 x = error_mark_node;
6240 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6241 && !task_reduction_needs_orig_p)
6242 x = var;
6243 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6245 tree pptype = build_pointer_type (ptype);
6246 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6247 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6248 size_int (task_reduction_cnt_full
6249 + task_reduction_cntorig - 1),
6250 NULL_TREE, NULL_TREE);
6251 else
6253 unsigned int idx
6254 = *ctx->task_reduction_map->get (c);
6255 x = task_reduction_read (ilist, tskred_temp,
6256 pptype, 7 + 3 * idx);
6258 x = fold_convert (pptype, x);
6259 x = build_simple_mem_ref (x);
6262 else
6264 lower_private_allocate (var, new_var, allocator,
6265 allocate_ptr, ilist, ctx, false,
6266 NULL_TREE);
6267 x = build_outer_var_ref (var, ctx);
6269 if (omp_privatize_by_reference (var)
6270 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6271 x = build_fold_addr_expr_loc (clause_loc, x);
6273 SET_DECL_VALUE_EXPR (placeholder, x);
6274 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6275 tree new_vard = new_var;
6276 if (omp_privatize_by_reference (var))
6278 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6279 new_vard = TREE_OPERAND (new_var, 0);
6280 gcc_assert (DECL_P (new_vard));
6282 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6283 if (is_simd
6284 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6285 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6286 rvarp = &rvar;
6287 if (is_simd
6288 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6289 ivar, lvar, rvarp,
6290 &rvar2))
6292 if (new_vard == new_var)
6294 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6295 SET_DECL_VALUE_EXPR (new_var, ivar);
6297 else
6299 SET_DECL_VALUE_EXPR (new_vard,
6300 build_fold_addr_expr (ivar));
6301 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6303 x = lang_hooks.decls.omp_clause_default_ctor
6304 (c, unshare_expr (ivar),
6305 build_outer_var_ref (var, ctx));
6306 if (rvarp && ctx->for_simd_scan_phase)
6308 if (x)
6309 gimplify_and_add (x, &llist[0]);
6310 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6311 if (x)
6312 gimplify_and_add (x, &llist[1]);
6313 break;
6315 else if (rvarp)
6317 if (x)
6319 gimplify_and_add (x, &llist[0]);
6321 tree ivar2 = unshare_expr (lvar);
6322 TREE_OPERAND (ivar2, 1) = sctx.idx;
6323 x = lang_hooks.decls.omp_clause_default_ctor
6324 (c, ivar2, build_outer_var_ref (var, ctx));
6325 gimplify_and_add (x, &llist[0]);
6327 if (rvar2)
6329 x = lang_hooks.decls.omp_clause_default_ctor
6330 (c, unshare_expr (rvar2),
6331 build_outer_var_ref (var, ctx));
6332 gimplify_and_add (x, &llist[0]);
6335 /* For types that need construction, add another
6336 private var which will be default constructed
6337 and optionally initialized with
6338 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6339 loop we want to assign this value instead of
6340 constructing and destructing it in each
6341 iteration. */
6342 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6343 gimple_add_tmp_var (nv);
6344 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6345 ? rvar2
6346 : ivar, 0),
6347 nv);
6348 x = lang_hooks.decls.omp_clause_default_ctor
6349 (c, nv, build_outer_var_ref (var, ctx));
6350 gimplify_and_add (x, ilist);
6352 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6354 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6355 x = DECL_VALUE_EXPR (new_vard);
6356 tree vexpr = nv;
6357 if (new_vard != new_var)
6358 vexpr = build_fold_addr_expr (nv);
6359 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6360 lower_omp (&tseq, ctx);
6361 SET_DECL_VALUE_EXPR (new_vard, x);
6362 gimple_seq_add_seq (ilist, tseq);
6363 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6366 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6367 if (x)
6368 gimplify_and_add (x, dlist);
6371 tree ref = build_outer_var_ref (var, ctx);
6372 x = unshare_expr (ivar);
6373 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6374 ref);
6375 gimplify_and_add (x, &llist[0]);
6377 ref = build_outer_var_ref (var, ctx);
6378 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6379 rvar);
6380 gimplify_and_add (x, &llist[3]);
6382 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6383 if (new_vard == new_var)
6384 SET_DECL_VALUE_EXPR (new_var, lvar);
6385 else
6386 SET_DECL_VALUE_EXPR (new_vard,
6387 build_fold_addr_expr (lvar));
6389 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6390 if (x)
6391 gimplify_and_add (x, &llist[1]);
6393 tree ivar2 = unshare_expr (lvar);
6394 TREE_OPERAND (ivar2, 1) = sctx.idx;
6395 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6396 if (x)
6397 gimplify_and_add (x, &llist[1]);
6399 if (rvar2)
6401 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6402 if (x)
6403 gimplify_and_add (x, &llist[1]);
6405 break;
6407 if (x)
6408 gimplify_and_add (x, &llist[0]);
6409 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6411 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6412 lower_omp (&tseq, ctx);
6413 gimple_seq_add_seq (&llist[0], tseq);
6415 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6416 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6417 lower_omp (&tseq, ctx);
6418 gimple_seq_add_seq (&llist[1], tseq);
6419 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6420 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6421 if (new_vard == new_var)
6422 SET_DECL_VALUE_EXPR (new_var, lvar);
6423 else
6424 SET_DECL_VALUE_EXPR (new_vard,
6425 build_fold_addr_expr (lvar));
6426 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6427 if (x)
6428 gimplify_and_add (x, &llist[1]);
6429 break;
6431 /* If this is a reference to constant size reduction var
6432 with placeholder, we haven't emitted the initializer
6433 for it because it is undesirable if SIMD arrays are used.
6434 But if they aren't used, we need to emit the deferred
6435 initialization now. */
6436 else if (omp_privatize_by_reference (var) && is_simd)
6437 handle_simd_reference (clause_loc, new_vard, ilist);
6439 tree lab2 = NULL_TREE;
6440 if (cond)
6442 gimple *g;
6443 if (!is_parallel_ctx (ctx))
6445 tree condv = create_tmp_var (boolean_type_node);
6446 tree m = build_simple_mem_ref (cond);
6447 g = gimple_build_assign (condv, m);
6448 gimple_seq_add_stmt (ilist, g);
6449 tree lab1
6450 = create_artificial_label (UNKNOWN_LOCATION);
6451 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6452 g = gimple_build_cond (NE_EXPR, condv,
6453 boolean_false_node,
6454 lab2, lab1);
6455 gimple_seq_add_stmt (ilist, g);
6456 gimple_seq_add_stmt (ilist,
6457 gimple_build_label (lab1));
6459 g = gimple_build_assign (build_simple_mem_ref (cond),
6460 boolean_true_node);
6461 gimple_seq_add_stmt (ilist, g);
6463 x = lang_hooks.decls.omp_clause_default_ctor
6464 (c, unshare_expr (new_var),
6465 cond ? NULL_TREE
6466 : build_outer_var_ref (var, ctx));
6467 if (x)
6468 gimplify_and_add (x, ilist);
6470 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6471 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6473 if (ctx->for_simd_scan_phase)
6474 goto do_dtor;
6475 if (x || (!is_simd
6476 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6478 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6479 gimple_add_tmp_var (nv);
6480 ctx->cb.decl_map->put (new_vard, nv);
6481 x = lang_hooks.decls.omp_clause_default_ctor
6482 (c, nv, build_outer_var_ref (var, ctx));
6483 if (x)
6484 gimplify_and_add (x, ilist);
6485 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6487 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6488 tree vexpr = nv;
6489 if (new_vard != new_var)
6490 vexpr = build_fold_addr_expr (nv);
6491 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6492 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6493 lower_omp (&tseq, ctx);
6494 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6495 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6496 gimple_seq_add_seq (ilist, tseq);
6498 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6499 if (is_simd && ctx->scan_exclusive)
6501 tree nv2
6502 = create_tmp_var_raw (TREE_TYPE (new_var));
6503 gimple_add_tmp_var (nv2);
6504 ctx->cb.decl_map->put (nv, nv2);
6505 x = lang_hooks.decls.omp_clause_default_ctor
6506 (c, nv2, build_outer_var_ref (var, ctx));
6507 gimplify_and_add (x, ilist);
6508 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6509 if (x)
6510 gimplify_and_add (x, dlist);
6512 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6513 if (x)
6514 gimplify_and_add (x, dlist);
6516 else if (is_simd
6517 && ctx->scan_exclusive
6518 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6520 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6521 gimple_add_tmp_var (nv2);
6522 ctx->cb.decl_map->put (new_vard, nv2);
6523 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6524 if (x)
6525 gimplify_and_add (x, dlist);
6527 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6528 goto do_dtor;
6531 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6533 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6534 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6535 && is_omp_target (ctx->stmt))
6537 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6538 tree oldv = NULL_TREE;
6539 gcc_assert (d);
6540 if (DECL_HAS_VALUE_EXPR_P (d))
6541 oldv = DECL_VALUE_EXPR (d);
6542 SET_DECL_VALUE_EXPR (d, new_vard);
6543 DECL_HAS_VALUE_EXPR_P (d) = 1;
6544 lower_omp (&tseq, ctx);
6545 if (oldv)
6546 SET_DECL_VALUE_EXPR (d, oldv);
6547 else
6549 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6550 DECL_HAS_VALUE_EXPR_P (d) = 0;
6553 else
6554 lower_omp (&tseq, ctx);
6555 gimple_seq_add_seq (ilist, tseq);
6557 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6558 if (is_simd)
6560 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6561 lower_omp (&tseq, ctx);
6562 gimple_seq_add_seq (dlist, tseq);
6563 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6565 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6566 if (cond)
6568 if (lab2)
6569 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6570 break;
6572 goto do_dtor;
6574 else
6576 x = omp_reduction_init (c, TREE_TYPE (new_var));
6577 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6578 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6580 if (cond)
6582 gimple *g;
6583 tree lab2 = NULL_TREE;
6584 /* GOMP_taskgroup_reduction_register memsets the whole
6585 array to zero. If the initializer is zero, we don't
6586 need to initialize it again, just mark it as ever
6587 used unconditionally, i.e. cond = true. */
6588 if (initializer_zerop (x))
6590 g = gimple_build_assign (build_simple_mem_ref (cond),
6591 boolean_true_node);
6592 gimple_seq_add_stmt (ilist, g);
6593 break;
6596 /* Otherwise, emit
6597 if (!cond) { cond = true; new_var = x; } */
6598 if (!is_parallel_ctx (ctx))
6600 tree condv = create_tmp_var (boolean_type_node);
6601 tree m = build_simple_mem_ref (cond);
6602 g = gimple_build_assign (condv, m);
6603 gimple_seq_add_stmt (ilist, g);
6604 tree lab1
6605 = create_artificial_label (UNKNOWN_LOCATION);
6606 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6607 g = gimple_build_cond (NE_EXPR, condv,
6608 boolean_false_node,
6609 lab2, lab1);
6610 gimple_seq_add_stmt (ilist, g);
6611 gimple_seq_add_stmt (ilist,
6612 gimple_build_label (lab1));
6614 g = gimple_build_assign (build_simple_mem_ref (cond),
6615 boolean_true_node);
6616 gimple_seq_add_stmt (ilist, g);
6617 gimplify_assign (new_var, x, ilist);
6618 if (lab2)
6619 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6620 break;
6623 /* reduction(-:var) sums up the partial results, so it
6624 acts identically to reduction(+:var). */
6625 if (code == MINUS_EXPR)
6626 code = PLUS_EXPR;
6628 bool is_truth_op
6629 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6630 tree new_vard = new_var;
6631 if (is_simd && omp_privatize_by_reference (var))
6633 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6634 new_vard = TREE_OPERAND (new_var, 0);
6635 gcc_assert (DECL_P (new_vard));
6637 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6638 if (is_simd
6639 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6640 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6641 rvarp = &rvar;
6642 if (is_simd
6643 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6644 ivar, lvar, rvarp,
6645 &rvar2))
6647 if (new_vard != new_var)
6649 SET_DECL_VALUE_EXPR (new_vard,
6650 build_fold_addr_expr (lvar));
6651 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6654 tree ref = build_outer_var_ref (var, ctx);
6656 if (rvarp)
6658 if (ctx->for_simd_scan_phase)
6659 break;
6660 gimplify_assign (ivar, ref, &llist[0]);
6661 ref = build_outer_var_ref (var, ctx);
6662 gimplify_assign (ref, rvar, &llist[3]);
6663 break;
6666 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6668 if (sctx.is_simt)
6670 if (!simt_lane)
6671 simt_lane = create_tmp_var (unsigned_type_node);
6672 x = build_call_expr_internal_loc
6673 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6674 TREE_TYPE (ivar), 2, ivar, simt_lane);
6675 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6676 gimplify_assign (ivar, x, &llist[2]);
6678 tree ivar2 = ivar;
6679 tree ref2 = ref;
6680 if (is_truth_op)
6682 tree zero = build_zero_cst (TREE_TYPE (ivar));
6683 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6684 boolean_type_node, ivar,
6685 zero);
6686 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6687 boolean_type_node, ref,
6688 zero);
6690 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6691 if (is_truth_op)
6692 x = fold_convert (TREE_TYPE (ref), x);
6693 ref = build_outer_var_ref (var, ctx);
6694 gimplify_assign (ref, x, &llist[1]);
6697 else
6699 lower_private_allocate (var, new_var, allocator,
6700 allocate_ptr, ilist, ctx,
6701 false, NULL_TREE);
6702 if (omp_privatize_by_reference (var) && is_simd)
6703 handle_simd_reference (clause_loc, new_vard, ilist);
6704 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6705 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6706 break;
6707 gimplify_assign (new_var, x, ilist);
6708 if (is_simd)
6710 tree ref = build_outer_var_ref (var, ctx);
6711 tree new_var2 = new_var;
6712 tree ref2 = ref;
6713 if (is_truth_op)
6715 tree zero = build_zero_cst (TREE_TYPE (new_var));
6716 new_var2
6717 = fold_build2_loc (clause_loc, NE_EXPR,
6718 boolean_type_node, new_var,
6719 zero);
6720 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6721 boolean_type_node, ref,
6722 zero);
6724 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6725 if (is_truth_op)
6726 x = fold_convert (TREE_TYPE (new_var), x);
6727 ref = build_outer_var_ref (var, ctx);
6728 gimplify_assign (ref, x, dlist);
6730 if (allocator)
6731 goto do_dtor;
6734 break;
6736 default:
6737 gcc_unreachable ();
6741 if (tskred_avar)
6743 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6744 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6747 if (known_eq (sctx.max_vf, 1U))
6749 sctx.is_simt = false;
6750 if (ctx->lastprivate_conditional_map)
6752 if (gimple_omp_for_combined_into_p (ctx->stmt))
6754 /* Signal to lower_omp_1 that it should use parent context. */
6755 ctx->combined_into_simd_safelen1 = true;
6756 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6757 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6758 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6760 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6761 omp_context *outer = ctx->outer;
6762 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6763 outer = outer->outer;
6764 tree *v = ctx->lastprivate_conditional_map->get (o);
6765 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6766 tree *pv = outer->lastprivate_conditional_map->get (po);
6767 *v = *pv;
6770 else
6772 /* When not vectorized, treat lastprivate(conditional:) like
6773 normal lastprivate, as there will be just one simd lane
6774 writing the privatized variable. */
6775 delete ctx->lastprivate_conditional_map;
6776 ctx->lastprivate_conditional_map = NULL;
6781 if (nonconst_simd_if)
6783 if (sctx.lane == NULL_TREE)
6785 sctx.idx = create_tmp_var (unsigned_type_node);
6786 sctx.lane = create_tmp_var (unsigned_type_node);
6788 /* FIXME: For now. */
6789 sctx.is_simt = false;
6792 if (sctx.lane || sctx.is_simt)
6794 uid = create_tmp_var (ptr_type_node, "simduid");
6795 /* Don't want uninit warnings on simduid, it is always uninitialized,
6796 but we use it not for the value, but for the DECL_UID only. */
6797 suppress_warning (uid, OPT_Wuninitialized);
6798 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6799 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6800 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6801 gimple_omp_for_set_clauses (ctx->stmt, c);
6803 /* Emit calls denoting privatized variables and initializing a pointer to
6804 structure that holds private variables as fields after ompdevlow pass. */
6805 if (sctx.is_simt)
6807 sctx.simt_eargs[0] = uid;
6808 gimple *g
6809 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6810 gimple_call_set_lhs (g, uid);
6811 gimple_seq_add_stmt (ilist, g);
6812 sctx.simt_eargs.release ();
6814 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6815 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6816 gimple_call_set_lhs (g, simtrec);
6817 gimple_seq_add_stmt (ilist, g);
6819 if (sctx.lane)
6821 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6822 2 + (nonconst_simd_if != NULL),
6823 uid, integer_zero_node,
6824 nonconst_simd_if);
6825 gimple_call_set_lhs (g, sctx.lane);
6826 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6827 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6828 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6829 build_int_cst (unsigned_type_node, 0));
6830 gimple_seq_add_stmt (ilist, g);
6831 if (sctx.lastlane)
6833 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6834 2, uid, sctx.lane);
6835 gimple_call_set_lhs (g, sctx.lastlane);
6836 gimple_seq_add_stmt (dlist, g);
6837 gimple_seq_add_seq (dlist, llist[3]);
6839 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6840 if (llist[2])
6842 tree simt_vf = create_tmp_var (unsigned_type_node);
6843 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6844 gimple_call_set_lhs (g, simt_vf);
6845 gimple_seq_add_stmt (dlist, g);
6847 tree t = build_int_cst (unsigned_type_node, 1);
6848 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6849 gimple_seq_add_stmt (dlist, g);
6851 t = build_int_cst (unsigned_type_node, 0);
6852 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6853 gimple_seq_add_stmt (dlist, g);
6855 tree body = create_artificial_label (UNKNOWN_LOCATION);
6856 tree header = create_artificial_label (UNKNOWN_LOCATION);
6857 tree end = create_artificial_label (UNKNOWN_LOCATION);
6858 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6859 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6861 gimple_seq_add_seq (dlist, llist[2]);
6863 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6864 gimple_seq_add_stmt (dlist, g);
6866 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6867 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6868 gimple_seq_add_stmt (dlist, g);
6870 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6872 for (int i = 0; i < 2; i++)
6873 if (llist[i])
6875 tree vf = create_tmp_var (unsigned_type_node);
6876 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6877 gimple_call_set_lhs (g, vf);
6878 gimple_seq *seq = i == 0 ? ilist : dlist;
6879 gimple_seq_add_stmt (seq, g);
6880 tree t = build_int_cst (unsigned_type_node, 0);
6881 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6882 gimple_seq_add_stmt (seq, g);
6883 tree body = create_artificial_label (UNKNOWN_LOCATION);
6884 tree header = create_artificial_label (UNKNOWN_LOCATION);
6885 tree end = create_artificial_label (UNKNOWN_LOCATION);
6886 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6887 gimple_seq_add_stmt (seq, gimple_build_label (body));
6888 gimple_seq_add_seq (seq, llist[i]);
6889 t = build_int_cst (unsigned_type_node, 1);
6890 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6891 gimple_seq_add_stmt (seq, g);
6892 gimple_seq_add_stmt (seq, gimple_build_label (header));
6893 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6894 gimple_seq_add_stmt (seq, g);
6895 gimple_seq_add_stmt (seq, gimple_build_label (end));
6898 if (sctx.is_simt)
6900 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6901 gimple *g
6902 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6903 gimple_seq_add_stmt (dlist, g);
6906 /* The copyin sequence is not to be executed by the main thread, since
6907 that would result in self-copies. Perhaps not visible to scalars,
6908 but it certainly is to C++ operator=. */
6909 if (copyin_seq)
6911 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6913 x = build2 (NE_EXPR, boolean_type_node, x,
6914 build_int_cst (TREE_TYPE (x), 0));
6915 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6916 gimplify_and_add (x, ilist);
6919 /* If any copyin variable is passed by reference, we must ensure the
6920 master thread doesn't modify it before it is copied over in all
6921 threads. Similarly for variables in both firstprivate and
6922 lastprivate clauses we need to ensure the lastprivate copying
6923 happens after firstprivate copying in all threads. And similarly
6924 for UDRs if initializer expression refers to omp_orig. */
6925 if (copyin_by_ref || lastprivate_firstprivate
6926 || (reduction_omp_orig_ref
6927 && !ctx->scan_inclusive
6928 && !ctx->scan_exclusive))
6930 /* Don't add any barrier for #pragma omp simd or
6931 #pragma omp distribute. */
6932 if (!is_task_ctx (ctx)
6933 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6934 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6935 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6938 /* If max_vf is non-zero, then we can use only a vectorization factor
6939 up to the max_vf we chose. So stick it into the safelen clause. */
6940 if (maybe_ne (sctx.max_vf, 0U))
6942 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6943 OMP_CLAUSE_SAFELEN);
6944 poly_uint64 safe_len;
6945 if (c == NULL_TREE
6946 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6947 && maybe_gt (safe_len, sctx.max_vf)))
6949 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6950 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6951 sctx.max_vf);
6952 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6953 gimple_omp_for_set_clauses (ctx->stmt, c);
6958 /* Create temporary variables for lastprivate(conditional:) implementation
6959 in context CTX with CLAUSES. */
6961 static void
6962 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6964 tree iter_type = NULL_TREE;
6965 tree cond_ptr = NULL_TREE;
6966 tree iter_var = NULL_TREE;
6967 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6968 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6969 tree next = *clauses;
6970 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6971 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6972 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6974 if (is_simd)
6976 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6977 gcc_assert (cc);
6978 if (iter_type == NULL_TREE)
6980 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6981 iter_var = create_tmp_var_raw (iter_type);
6982 DECL_CONTEXT (iter_var) = current_function_decl;
6983 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6984 DECL_CHAIN (iter_var) = ctx->block_vars;
6985 ctx->block_vars = iter_var;
6986 tree c3
6987 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6988 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6989 OMP_CLAUSE_DECL (c3) = iter_var;
6990 OMP_CLAUSE_CHAIN (c3) = *clauses;
6991 *clauses = c3;
6992 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6994 next = OMP_CLAUSE_CHAIN (cc);
6995 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6996 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6997 ctx->lastprivate_conditional_map->put (o, v);
6998 continue;
7000 if (iter_type == NULL)
7002 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
7004 struct omp_for_data fd;
7005 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
7006 NULL);
7007 iter_type = unsigned_type_for (fd.iter_type);
7009 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
7010 iter_type = unsigned_type_node;
7011 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
7012 if (c2)
7014 cond_ptr
7015 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
7016 OMP_CLAUSE_DECL (c2) = cond_ptr;
7018 else
7020 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
7021 DECL_CONTEXT (cond_ptr) = current_function_decl;
7022 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
7023 DECL_CHAIN (cond_ptr) = ctx->block_vars;
7024 ctx->block_vars = cond_ptr;
7025 c2 = build_omp_clause (UNKNOWN_LOCATION,
7026 OMP_CLAUSE__CONDTEMP_);
7027 OMP_CLAUSE_DECL (c2) = cond_ptr;
7028 OMP_CLAUSE_CHAIN (c2) = *clauses;
7029 *clauses = c2;
7031 iter_var = create_tmp_var_raw (iter_type);
7032 DECL_CONTEXT (iter_var) = current_function_decl;
7033 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7034 DECL_CHAIN (iter_var) = ctx->block_vars;
7035 ctx->block_vars = iter_var;
7036 tree c3
7037 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7038 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7039 OMP_CLAUSE_DECL (c3) = iter_var;
7040 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
7041 OMP_CLAUSE_CHAIN (c2) = c3;
7042 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7044 tree v = create_tmp_var_raw (iter_type);
7045 DECL_CONTEXT (v) = current_function_decl;
7046 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
7047 DECL_CHAIN (v) = ctx->block_vars;
7048 ctx->block_vars = v;
7049 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7050 ctx->lastprivate_conditional_map->put (o, v);
7055 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7056 both parallel and workshare constructs. PREDICATE may be NULL if it's
7057 always true. BODY_P is the sequence to insert early initialization
7058 if needed, STMT_LIST is where the non-conditional lastprivate handling
7059 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
7060 section. */
7062 static void
7063 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
7064 gimple_seq *stmt_list, gimple_seq *cstmt_list,
7065 omp_context *ctx)
7067 tree x, c, label = NULL, orig_clauses = clauses;
7068 bool par_clauses = false;
7069 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
7070 unsigned HOST_WIDE_INT conditional_off = 0;
7071 gimple_seq post_stmt_list = NULL;
7073 /* Early exit if there are no lastprivate or linear clauses. */
7074 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
7075 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
7076 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
7077 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
7078 break;
7079 if (clauses == NULL)
7081 /* If this was a workshare clause, see if it had been combined
7082 with its parallel. In that case, look for the clauses on the
7083 parallel statement itself. */
7084 if (is_parallel_ctx (ctx))
7085 return;
7087 ctx = ctx->outer;
7088 if (ctx == NULL || !is_parallel_ctx (ctx))
7089 return;
7091 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7092 OMP_CLAUSE_LASTPRIVATE);
7093 if (clauses == NULL)
7094 return;
7095 par_clauses = true;
7098 bool maybe_simt = false;
7099 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7100 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7102 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
7103 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
7104 if (simduid)
7105 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
7108 if (predicate)
7110 gcond *stmt;
7111 tree label_true, arm1, arm2;
7112 enum tree_code pred_code = TREE_CODE (predicate);
7114 label = create_artificial_label (UNKNOWN_LOCATION);
7115 label_true = create_artificial_label (UNKNOWN_LOCATION);
7116 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
7118 arm1 = TREE_OPERAND (predicate, 0);
7119 arm2 = TREE_OPERAND (predicate, 1);
7120 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7121 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
7123 else
7125 arm1 = predicate;
7126 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7127 arm2 = boolean_false_node;
7128 pred_code = NE_EXPR;
7130 if (maybe_simt)
7132 c = build2 (pred_code, boolean_type_node, arm1, arm2);
7133 c = fold_convert (integer_type_node, c);
7134 simtcond = create_tmp_var (integer_type_node);
7135 gimplify_assign (simtcond, c, stmt_list);
7136 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
7137 1, simtcond);
7138 c = create_tmp_var (integer_type_node);
7139 gimple_call_set_lhs (g, c);
7140 gimple_seq_add_stmt (stmt_list, g);
7141 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
7142 label_true, label);
7144 else
7145 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
7146 gimple_seq_add_stmt (stmt_list, stmt);
7147 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
7150 tree cond_ptr = NULL_TREE;
7151 for (c = clauses; c ;)
7153 tree var, new_var;
7154 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7155 gimple_seq *this_stmt_list = stmt_list;
7156 tree lab2 = NULL_TREE;
7158 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7159 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7160 && ctx->lastprivate_conditional_map
7161 && !ctx->combined_into_simd_safelen1)
7163 gcc_assert (body_p);
7164 if (simduid)
7165 goto next;
7166 if (cond_ptr == NULL_TREE)
7168 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
7169 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
7171 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
7172 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7173 tree v = *ctx->lastprivate_conditional_map->get (o);
7174 gimplify_assign (v, build_zero_cst (type), body_p);
7175 this_stmt_list = cstmt_list;
7176 tree mem;
7177 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
7179 mem = build2 (MEM_REF, type, cond_ptr,
7180 build_int_cst (TREE_TYPE (cond_ptr),
7181 conditional_off));
7182 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
7184 else
7185 mem = build4 (ARRAY_REF, type, cond_ptr,
7186 size_int (conditional_off++), NULL_TREE, NULL_TREE);
7187 tree mem2 = copy_node (mem);
7188 gimple_seq seq = NULL;
7189 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
7190 gimple_seq_add_seq (this_stmt_list, seq);
7191 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7192 lab2 = create_artificial_label (UNKNOWN_LOCATION);
7193 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
7194 gimple_seq_add_stmt (this_stmt_list, g);
7195 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
7196 gimplify_assign (mem2, v, this_stmt_list);
7198 else if (predicate
7199 && ctx->combined_into_simd_safelen1
7200 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7201 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7202 && ctx->lastprivate_conditional_map)
7203 this_stmt_list = &post_stmt_list;
7205 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7206 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7207 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7209 var = OMP_CLAUSE_DECL (c);
7210 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7211 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7212 && is_taskloop_ctx (ctx))
7214 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
7215 new_var = lookup_decl (var, ctx->outer);
7217 else
7219 new_var = lookup_decl (var, ctx);
7220 /* Avoid uninitialized warnings for lastprivate and
7221 for linear iterators. */
7222 if (predicate
7223 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7224 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
7225 suppress_warning (new_var, OPT_Wuninitialized);
7228 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
7230 tree val = DECL_VALUE_EXPR (new_var);
7231 if (TREE_CODE (val) == ARRAY_REF
7232 && VAR_P (TREE_OPERAND (val, 0))
7233 && lookup_attribute ("omp simd array",
7234 DECL_ATTRIBUTES (TREE_OPERAND (val,
7235 0))))
7237 if (lastlane == NULL)
7239 lastlane = create_tmp_var (unsigned_type_node);
7240 gcall *g
7241 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
7242 2, simduid,
7243 TREE_OPERAND (val, 1));
7244 gimple_call_set_lhs (g, lastlane);
7245 gimple_seq_add_stmt (this_stmt_list, g);
7247 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
7248 TREE_OPERAND (val, 0), lastlane,
7249 NULL_TREE, NULL_TREE);
7250 TREE_THIS_NOTRAP (new_var) = 1;
7253 else if (maybe_simt)
7255 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7256 ? DECL_VALUE_EXPR (new_var)
7257 : new_var);
7258 if (simtlast == NULL)
7260 simtlast = create_tmp_var (unsigned_type_node);
7261 gcall *g = gimple_build_call_internal
7262 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7263 gimple_call_set_lhs (g, simtlast);
7264 gimple_seq_add_stmt (this_stmt_list, g);
7266 x = build_call_expr_internal_loc
7267 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7268 TREE_TYPE (val), 2, val, simtlast);
7269 new_var = unshare_expr (new_var);
7270 gimplify_assign (new_var, x, this_stmt_list);
7271 new_var = unshare_expr (new_var);
7274 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7275 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7277 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7278 gimple_seq_add_seq (this_stmt_list,
7279 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7280 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7282 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7283 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7285 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7286 gimple_seq_add_seq (this_stmt_list,
7287 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7288 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
7291 x = NULL_TREE;
7292 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7293 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7294 && is_taskloop_ctx (ctx))
7296 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7297 ctx->outer->outer);
7298 if (is_global_var (ovar))
7299 x = ovar;
7301 if (!x)
7302 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7303 if (omp_privatize_by_reference (var))
7304 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7305 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7306 gimplify_and_add (x, this_stmt_list);
7308 if (lab2)
7309 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7312 next:
7313 c = OMP_CLAUSE_CHAIN (c);
7314 if (c == NULL && !par_clauses)
7316 /* If this was a workshare clause, see if it had been combined
7317 with its parallel. In that case, continue looking for the
7318 clauses also on the parallel statement itself. */
7319 if (is_parallel_ctx (ctx))
7320 break;
7322 ctx = ctx->outer;
7323 if (ctx == NULL || !is_parallel_ctx (ctx))
7324 break;
7326 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7327 OMP_CLAUSE_LASTPRIVATE);
7328 par_clauses = true;
7332 if (label)
7333 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7334 gimple_seq_add_seq (stmt_list, post_stmt_list);
7337 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7338 (which might be a placeholder). INNER is true if this is an inner
7339 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7340 join markers. Generate the before-loop forking sequence in
7341 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7342 general form of these sequences is
7344 GOACC_REDUCTION_SETUP
7345 GOACC_FORK
7346 GOACC_REDUCTION_INIT
7348 GOACC_REDUCTION_FINI
7349 GOACC_JOIN
7350 GOACC_REDUCTION_TEARDOWN. */
7352 static void
7353 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7354 gcall *fork, gcall *private_marker, gcall *join,
7355 gimple_seq *fork_seq, gimple_seq *join_seq,
7356 omp_context *ctx)
7358 gimple_seq before_fork = NULL;
7359 gimple_seq after_fork = NULL;
7360 gimple_seq before_join = NULL;
7361 gimple_seq after_join = NULL;
7362 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7363 setup_code = NULL_TREE, teardown_code = NULL_TREE;
7364 unsigned offset = 0;
7366 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7367 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7369 /* No 'reduction' clauses on OpenACC 'kernels'. */
7370 gcc_checking_assert (!is_oacc_kernels (ctx));
7371 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7372 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7374 tree orig = OMP_CLAUSE_DECL (c);
7375 tree var = maybe_lookup_decl (orig, ctx);
7376 tree ref_to_res = NULL_TREE;
7377 tree incoming, outgoing, v1, v2, v3;
7378 bool is_private = false;
7380 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7381 if (rcode == MINUS_EXPR)
7382 rcode = PLUS_EXPR;
7383 else if (rcode == TRUTH_ANDIF_EXPR)
7384 rcode = BIT_AND_EXPR;
7385 else if (rcode == TRUTH_ORIF_EXPR)
7386 rcode = BIT_IOR_EXPR;
7387 tree op = build_int_cst (unsigned_type_node, rcode);
7389 if (!var)
7390 var = orig;
7392 incoming = outgoing = var;
7394 if (!inner)
7396 /* See if an outer construct also reduces this variable. */
7397 omp_context *outer = ctx;
7399 while (omp_context *probe = outer->outer)
7401 enum gimple_code type = gimple_code (probe->stmt);
7402 tree cls;
7404 switch (type)
7406 case GIMPLE_OMP_FOR:
7407 cls = gimple_omp_for_clauses (probe->stmt);
7408 break;
7410 case GIMPLE_OMP_TARGET:
7411 /* No 'reduction' clauses inside OpenACC 'kernels'
7412 regions. */
7413 gcc_checking_assert (!is_oacc_kernels (probe));
7415 if (!is_gimple_omp_offloaded (probe->stmt))
7416 goto do_lookup;
7418 cls = gimple_omp_target_clauses (probe->stmt);
7419 break;
7421 default:
7422 goto do_lookup;
7425 outer = probe;
7426 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7427 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7428 && orig == OMP_CLAUSE_DECL (cls))
7430 incoming = outgoing = lookup_decl (orig, probe);
7431 goto has_outer_reduction;
7433 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7434 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7435 && orig == OMP_CLAUSE_DECL (cls))
7437 is_private = true;
7438 goto do_lookup;
7442 do_lookup:
7443 /* This is the outermost construct with this reduction,
7444 see if there's a mapping for it. */
7445 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7446 && maybe_lookup_field (orig, outer) && !is_private)
7448 ref_to_res = build_receiver_ref (orig, false, outer);
7449 if (omp_privatize_by_reference (orig))
7450 ref_to_res = build_simple_mem_ref (ref_to_res);
7452 tree type = TREE_TYPE (var);
7453 if (POINTER_TYPE_P (type))
7454 type = TREE_TYPE (type);
7456 outgoing = var;
7457 incoming = omp_reduction_init_op (loc, rcode, type);
7459 else
7461 /* Try to look at enclosing contexts for reduction var,
7462 use original if no mapping found. */
7463 tree t = NULL_TREE;
7464 omp_context *c = ctx->outer;
7465 while (c && !t)
7467 t = maybe_lookup_decl (orig, c);
7468 c = c->outer;
7470 incoming = outgoing = (t ? t : orig);
7473 has_outer_reduction:;
7476 if (!ref_to_res)
7477 ref_to_res = integer_zero_node;
7479 if (omp_privatize_by_reference (orig))
7481 tree type = TREE_TYPE (var);
7482 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7484 if (!inner)
7486 tree x = create_tmp_var (TREE_TYPE (type), id);
7487 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7490 v1 = create_tmp_var (type, id);
7491 v2 = create_tmp_var (type, id);
7492 v3 = create_tmp_var (type, id);
7494 gimplify_assign (v1, var, fork_seq);
7495 gimplify_assign (v2, var, fork_seq);
7496 gimplify_assign (v3, var, fork_seq);
7498 var = build_simple_mem_ref (var);
7499 v1 = build_simple_mem_ref (v1);
7500 v2 = build_simple_mem_ref (v2);
7501 v3 = build_simple_mem_ref (v3);
7502 outgoing = build_simple_mem_ref (outgoing);
7504 if (!TREE_CONSTANT (incoming))
7505 incoming = build_simple_mem_ref (incoming);
7507 else
7508 v1 = v2 = v3 = var;
7510 /* Determine position in reduction buffer, which may be used
7511 by target. The parser has ensured that this is not a
7512 variable-sized type. */
7513 fixed_size_mode mode
7514 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7515 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7516 offset = (offset + align - 1) & ~(align - 1);
7517 tree off = build_int_cst (sizetype, offset);
7518 offset += GET_MODE_SIZE (mode);
7520 if (!init_code)
7522 init_code = build_int_cst (integer_type_node,
7523 IFN_GOACC_REDUCTION_INIT);
7524 fini_code = build_int_cst (integer_type_node,
7525 IFN_GOACC_REDUCTION_FINI);
7526 setup_code = build_int_cst (integer_type_node,
7527 IFN_GOACC_REDUCTION_SETUP);
7528 teardown_code = build_int_cst (integer_type_node,
7529 IFN_GOACC_REDUCTION_TEARDOWN);
7532 tree setup_call
7533 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7534 TREE_TYPE (var), 6, setup_code,
7535 unshare_expr (ref_to_res),
7536 incoming, level, op, off);
7537 tree init_call
7538 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7539 TREE_TYPE (var), 6, init_code,
7540 unshare_expr (ref_to_res),
7541 v1, level, op, off);
7542 tree fini_call
7543 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7544 TREE_TYPE (var), 6, fini_code,
7545 unshare_expr (ref_to_res),
7546 v2, level, op, off);
7547 tree teardown_call
7548 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7549 TREE_TYPE (var), 6, teardown_code,
7550 ref_to_res, v3, level, op, off);
7552 gimplify_assign (v1, setup_call, &before_fork);
7553 gimplify_assign (v2, init_call, &after_fork);
7554 gimplify_assign (v3, fini_call, &before_join);
7555 gimplify_assign (outgoing, teardown_call, &after_join);
7558 /* Now stitch things together. */
7559 gimple_seq_add_seq (fork_seq, before_fork);
7560 if (private_marker)
7561 gimple_seq_add_stmt (fork_seq, private_marker);
7562 if (fork)
7563 gimple_seq_add_stmt (fork_seq, fork);
7564 gimple_seq_add_seq (fork_seq, after_fork);
7566 gimple_seq_add_seq (join_seq, before_join);
7567 if (join)
7568 gimple_seq_add_stmt (join_seq, join);
7569 gimple_seq_add_seq (join_seq, after_join);
7572 /* Generate code to implement the REDUCTION clauses, append it
7573 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7574 that should be emitted also inside of the critical section,
7575 in that case clear *CLIST afterwards, otherwise leave it as is
7576 and let the caller emit it itself. */
7578 static void
7579 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7580 gimple_seq *clist, omp_context *ctx)
7582 gimple_seq sub_seq = NULL;
7583 gimple *stmt;
7584 tree x, c;
7585 int count = 0;
7587 /* OpenACC loop reductions are handled elsewhere. */
7588 if (is_gimple_omp_oacc (ctx->stmt))
7589 return;
7591 /* SIMD reductions are handled in lower_rec_input_clauses. */
7592 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7593 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7594 return;
7596 /* inscan reductions are handled elsewhere. */
7597 if (ctx->scan_inclusive || ctx->scan_exclusive)
7598 return;
7600 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7601 update in that case, otherwise use a lock. */
7602 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7603 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7604 && !OMP_CLAUSE_REDUCTION_TASK (c))
7606 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7607 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7609 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7610 count = -1;
7611 break;
7613 count++;
7616 if (count == 0)
7617 return;
7619 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7621 tree var, ref, new_var, orig_var;
7622 enum tree_code code;
7623 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7625 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7626 || OMP_CLAUSE_REDUCTION_TASK (c))
7627 continue;
7629 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7630 orig_var = var = OMP_CLAUSE_DECL (c);
7631 if (TREE_CODE (var) == MEM_REF)
7633 var = TREE_OPERAND (var, 0);
7634 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7635 var = TREE_OPERAND (var, 0);
7636 if (TREE_CODE (var) == ADDR_EXPR)
7637 var = TREE_OPERAND (var, 0);
7638 else
7640 /* If this is a pointer or referenced based array
7641 section, the var could be private in the outer
7642 context e.g. on orphaned loop construct. Pretend this
7643 is private variable's outer reference. */
7644 ccode = OMP_CLAUSE_PRIVATE;
7645 if (TREE_CODE (var) == INDIRECT_REF)
7646 var = TREE_OPERAND (var, 0);
7648 orig_var = var;
7649 if (is_variable_sized (var))
7651 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7652 var = DECL_VALUE_EXPR (var);
7653 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7654 var = TREE_OPERAND (var, 0);
7655 gcc_assert (DECL_P (var));
7658 new_var = lookup_decl (var, ctx);
7659 if (var == OMP_CLAUSE_DECL (c)
7660 && omp_privatize_by_reference (var))
7661 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7662 ref = build_outer_var_ref (var, ctx, ccode);
7663 code = OMP_CLAUSE_REDUCTION_CODE (c);
7665 /* reduction(-:var) sums up the partial results, so it acts
7666 identically to reduction(+:var). */
7667 if (code == MINUS_EXPR)
7668 code = PLUS_EXPR;
7670 bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
7671 if (count == 1)
7673 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7675 addr = save_expr (addr);
7676 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7677 tree new_var2 = new_var;
7678 tree ref2 = ref;
7679 if (is_truth_op)
7681 tree zero = build_zero_cst (TREE_TYPE (new_var));
7682 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7683 boolean_type_node, new_var, zero);
7684 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7685 ref, zero);
7687 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7688 new_var2);
7689 if (is_truth_op)
7690 x = fold_convert (TREE_TYPE (new_var), x);
7691 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7692 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7693 gimplify_and_add (x, stmt_seqp);
7694 return;
7696 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7698 tree d = OMP_CLAUSE_DECL (c);
7699 tree type = TREE_TYPE (d);
7700 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7701 tree i = create_tmp_var (TREE_TYPE (v));
7702 tree ptype = build_pointer_type (TREE_TYPE (type));
7703 tree bias = TREE_OPERAND (d, 1);
7704 d = TREE_OPERAND (d, 0);
7705 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7707 tree b = TREE_OPERAND (d, 1);
7708 b = maybe_lookup_decl (b, ctx);
7709 if (b == NULL)
7711 b = TREE_OPERAND (d, 1);
7712 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7714 if (integer_zerop (bias))
7715 bias = b;
7716 else
7718 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7719 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7720 TREE_TYPE (b), b, bias);
7722 d = TREE_OPERAND (d, 0);
7724 /* For ref build_outer_var_ref already performs this, so
7725 only new_var needs a dereference. */
7726 if (TREE_CODE (d) == INDIRECT_REF)
7728 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7729 gcc_assert (omp_privatize_by_reference (var)
7730 && var == orig_var);
7732 else if (TREE_CODE (d) == ADDR_EXPR)
7734 if (orig_var == var)
7736 new_var = build_fold_addr_expr (new_var);
7737 ref = build_fold_addr_expr (ref);
7740 else
7742 gcc_assert (orig_var == var);
7743 if (omp_privatize_by_reference (var))
7744 ref = build_fold_addr_expr (ref);
7746 if (DECL_P (v))
7748 tree t = maybe_lookup_decl (v, ctx);
7749 if (t)
7750 v = t;
7751 else
7752 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7753 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7755 if (!integer_zerop (bias))
7757 bias = fold_convert_loc (clause_loc, sizetype, bias);
7758 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7759 TREE_TYPE (new_var), new_var,
7760 unshare_expr (bias));
7761 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7762 TREE_TYPE (ref), ref, bias);
7764 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7765 ref = fold_convert_loc (clause_loc, ptype, ref);
7766 tree m = create_tmp_var (ptype);
7767 gimplify_assign (m, new_var, stmt_seqp);
7768 new_var = m;
7769 m = create_tmp_var (ptype);
7770 gimplify_assign (m, ref, stmt_seqp);
7771 ref = m;
7772 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7773 tree body = create_artificial_label (UNKNOWN_LOCATION);
7774 tree end = create_artificial_label (UNKNOWN_LOCATION);
7775 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7776 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7777 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7778 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7780 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7781 tree decl_placeholder
7782 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7783 SET_DECL_VALUE_EXPR (placeholder, out);
7784 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7785 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7786 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7787 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7788 gimple_seq_add_seq (&sub_seq,
7789 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7790 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7791 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7792 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7794 else
7796 tree out2 = out;
7797 tree priv2 = priv;
7798 if (is_truth_op)
7800 tree zero = build_zero_cst (TREE_TYPE (out));
7801 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7802 boolean_type_node, out, zero);
7803 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7804 boolean_type_node, priv, zero);
7806 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7807 if (is_truth_op)
7808 x = fold_convert (TREE_TYPE (out), x);
7809 out = unshare_expr (out);
7810 gimplify_assign (out, x, &sub_seq);
7812 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7813 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7814 gimple_seq_add_stmt (&sub_seq, g);
7815 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7816 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7817 gimple_seq_add_stmt (&sub_seq, g);
7818 g = gimple_build_assign (i, PLUS_EXPR, i,
7819 build_int_cst (TREE_TYPE (i), 1));
7820 gimple_seq_add_stmt (&sub_seq, g);
7821 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7822 gimple_seq_add_stmt (&sub_seq, g);
7823 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7825 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7827 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7829 if (omp_privatize_by_reference (var)
7830 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7831 TREE_TYPE (ref)))
7832 ref = build_fold_addr_expr_loc (clause_loc, ref);
7833 SET_DECL_VALUE_EXPR (placeholder, ref);
7834 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7835 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7836 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7837 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7838 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7840 else
7842 tree new_var2 = new_var;
7843 tree ref2 = ref;
7844 if (is_truth_op)
7846 tree zero = build_zero_cst (TREE_TYPE (new_var));
7847 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7848 boolean_type_node, new_var, zero);
7849 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7850 ref, zero);
7852 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7853 if (is_truth_op)
7854 x = fold_convert (TREE_TYPE (new_var), x);
7855 ref = build_outer_var_ref (var, ctx);
7856 gimplify_assign (ref, x, &sub_seq);
7860 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7862 gimple_seq_add_stmt (stmt_seqp, stmt);
7864 gimple_seq_add_seq (stmt_seqp, sub_seq);
7866 if (clist)
7868 gimple_seq_add_seq (stmt_seqp, *clist);
7869 *clist = NULL;
7872 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7874 gimple_seq_add_stmt (stmt_seqp, stmt);
7878 /* Generate code to implement the COPYPRIVATE clauses. */
7880 static void
7881 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7882 omp_context *ctx)
7884 tree c;
7886 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7888 tree var, new_var, ref, x;
7889 bool by_ref;
7890 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7892 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7893 continue;
7895 var = OMP_CLAUSE_DECL (c);
7896 by_ref = use_pointer_for_field (var, NULL);
7898 ref = build_sender_ref (var, ctx);
7899 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7900 if (by_ref)
7902 x = build_fold_addr_expr_loc (clause_loc, new_var);
7903 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7905 gimplify_assign (ref, x, slist);
7907 ref = build_receiver_ref (var, false, ctx);
7908 if (by_ref)
7910 ref = fold_convert_loc (clause_loc,
7911 build_pointer_type (TREE_TYPE (new_var)),
7912 ref);
7913 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7915 if (omp_privatize_by_reference (var))
7917 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7918 ref = build_simple_mem_ref_loc (clause_loc, ref);
7919 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7921 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7922 gimplify_and_add (x, rlist);
7927 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7928 and REDUCTION from the sender (aka parent) side. */
7930 static void
7931 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7932 omp_context *ctx)
7934 tree c, t;
7935 int ignored_looptemp = 0;
7936 bool is_taskloop = false;
7938 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7939 by GOMP_taskloop. */
7940 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7942 ignored_looptemp = 2;
7943 is_taskloop = true;
7946 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7948 tree val, ref, x, var;
7949 bool by_ref, do_in = false, do_out = false;
7950 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7952 switch (OMP_CLAUSE_CODE (c))
7954 case OMP_CLAUSE_PRIVATE:
7955 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7956 break;
7957 continue;
7958 case OMP_CLAUSE_FIRSTPRIVATE:
7959 case OMP_CLAUSE_COPYIN:
7960 case OMP_CLAUSE_LASTPRIVATE:
7961 case OMP_CLAUSE_IN_REDUCTION:
7962 case OMP_CLAUSE__REDUCTEMP_:
7963 break;
7964 case OMP_CLAUSE_REDUCTION:
7965 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7966 continue;
7967 break;
7968 case OMP_CLAUSE_SHARED:
7969 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7970 break;
7971 continue;
7972 case OMP_CLAUSE__LOOPTEMP_:
7973 if (ignored_looptemp)
7975 ignored_looptemp--;
7976 continue;
7978 break;
7979 default:
7980 continue;
7983 val = OMP_CLAUSE_DECL (c);
7984 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7985 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7986 && TREE_CODE (val) == MEM_REF)
7988 val = TREE_OPERAND (val, 0);
7989 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7990 val = TREE_OPERAND (val, 0);
7991 if (TREE_CODE (val) == INDIRECT_REF
7992 || TREE_CODE (val) == ADDR_EXPR)
7993 val = TREE_OPERAND (val, 0);
7994 if (is_variable_sized (val))
7995 continue;
7998 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7999 outer taskloop region. */
8000 omp_context *ctx_for_o = ctx;
8001 if (is_taskloop
8002 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8003 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8004 ctx_for_o = ctx->outer;
8006 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
8008 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
8009 && is_global_var (var)
8010 && (val == OMP_CLAUSE_DECL (c)
8011 || !is_task_ctx (ctx)
8012 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
8013 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
8014 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
8015 != POINTER_TYPE)))))
8016 continue;
8018 t = omp_member_access_dummy_var (var);
8019 if (t)
8021 var = DECL_VALUE_EXPR (var);
8022 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
8023 if (o != t)
8024 var = unshare_and_remap (var, t, o);
8025 else
8026 var = unshare_expr (var);
8029 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
8031 /* Handle taskloop firstprivate/lastprivate, where the
8032 lastprivate on GIMPLE_OMP_TASK is represented as
8033 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8034 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
8035 x = omp_build_component_ref (ctx->sender_decl, f);
8036 if (use_pointer_for_field (val, ctx))
8037 var = build_fold_addr_expr (var);
8038 gimplify_assign (x, var, ilist);
8039 DECL_ABSTRACT_ORIGIN (f) = NULL;
8040 continue;
8043 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8044 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
8045 || val == OMP_CLAUSE_DECL (c))
8046 && is_variable_sized (val))
8047 continue;
8048 by_ref = use_pointer_for_field (val, NULL);
8050 switch (OMP_CLAUSE_CODE (c))
8052 case OMP_CLAUSE_FIRSTPRIVATE:
8053 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
8054 && !by_ref
8055 && is_task_ctx (ctx))
8056 suppress_warning (var);
8057 do_in = true;
8058 break;
8060 case OMP_CLAUSE_PRIVATE:
8061 case OMP_CLAUSE_COPYIN:
8062 case OMP_CLAUSE__LOOPTEMP_:
8063 case OMP_CLAUSE__REDUCTEMP_:
8064 do_in = true;
8065 break;
8067 case OMP_CLAUSE_LASTPRIVATE:
8068 if (by_ref || omp_privatize_by_reference (val))
8070 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8071 continue;
8072 do_in = true;
8074 else
8076 do_out = true;
8077 if (lang_hooks.decls.omp_private_outer_ref (val))
8078 do_in = true;
8080 break;
8082 case OMP_CLAUSE_REDUCTION:
8083 case OMP_CLAUSE_IN_REDUCTION:
8084 do_in = true;
8085 if (val == OMP_CLAUSE_DECL (c))
8087 if (is_task_ctx (ctx))
8088 by_ref = use_pointer_for_field (val, ctx);
8089 else
8090 do_out = !(by_ref || omp_privatize_by_reference (val));
8092 else
8093 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
8094 break;
8096 default:
8097 gcc_unreachable ();
8100 if (do_in)
8102 ref = build_sender_ref (val, ctx);
8103 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
8104 gimplify_assign (ref, x, ilist);
8105 if (is_task_ctx (ctx))
8106 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
8109 if (do_out)
8111 ref = build_sender_ref (val, ctx);
8112 gimplify_assign (var, ref, olist);
8117 /* Generate code to implement SHARED from the sender (aka parent)
8118 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8119 list things that got automatically shared. */
8121 static void
8122 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
8124 tree var, ovar, nvar, t, f, x, record_type;
8126 if (ctx->record_type == NULL)
8127 return;
8129 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
8130 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8132 ovar = DECL_ABSTRACT_ORIGIN (f);
8133 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
8134 continue;
8136 nvar = maybe_lookup_decl (ovar, ctx);
8137 if (!nvar
8138 || !DECL_HAS_VALUE_EXPR_P (nvar)
8139 || (ctx->allocate_map
8140 && ctx->allocate_map->get (ovar)))
8141 continue;
8143 /* If CTX is a nested parallel directive. Find the immediately
8144 enclosing parallel or workshare construct that contains a
8145 mapping for OVAR. */
8146 var = lookup_decl_in_outer_ctx (ovar, ctx);
8148 t = omp_member_access_dummy_var (var);
8149 if (t)
8151 var = DECL_VALUE_EXPR (var);
8152 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
8153 if (o != t)
8154 var = unshare_and_remap (var, t, o);
8155 else
8156 var = unshare_expr (var);
8159 if (use_pointer_for_field (ovar, ctx))
8161 x = build_sender_ref (ovar, ctx);
8162 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
8163 && TREE_TYPE (f) == TREE_TYPE (ovar))
8165 gcc_assert (is_parallel_ctx (ctx)
8166 && DECL_ARTIFICIAL (ovar));
8167 /* _condtemp_ clause. */
8168 var = build_constructor (TREE_TYPE (x), NULL);
8170 else
8171 var = build_fold_addr_expr (var);
8172 gimplify_assign (x, var, ilist);
8174 else
8176 x = build_sender_ref (ovar, ctx);
8177 gimplify_assign (x, var, ilist);
8179 if (!TREE_READONLY (var)
8180 /* We don't need to receive a new reference to a result
8181 or parm decl. In fact we may not store to it as we will
8182 invalidate any pending RSO and generate wrong gimple
8183 during inlining. */
8184 && !((TREE_CODE (var) == RESULT_DECL
8185 || TREE_CODE (var) == PARM_DECL)
8186 && DECL_BY_REFERENCE (var)))
8188 x = build_sender_ref (ovar, ctx);
8189 gimplify_assign (var, x, olist);
8195 /* Emit an OpenACC head marker call, encapulating the partitioning and
8196 other information that must be processed by the target compiler.
8197 Return the maximum number of dimensions the associated loop might
8198 be partitioned over. */
8200 static unsigned
8201 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
8202 gimple_seq *seq, omp_context *ctx)
8204 unsigned levels = 0;
8205 unsigned tag = 0;
8206 tree gang_static = NULL_TREE;
8207 auto_vec<tree, 5> args;
8209 args.quick_push (build_int_cst
8210 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
8211 args.quick_push (ddvar);
8212 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8214 switch (OMP_CLAUSE_CODE (c))
8216 case OMP_CLAUSE_GANG:
8217 tag |= OLF_DIM_GANG;
8218 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
8219 /* static:* is represented by -1, and we can ignore it, as
8220 scheduling is always static. */
8221 if (gang_static && integer_minus_onep (gang_static))
8222 gang_static = NULL_TREE;
8223 levels++;
8224 break;
8226 case OMP_CLAUSE_WORKER:
8227 tag |= OLF_DIM_WORKER;
8228 levels++;
8229 break;
8231 case OMP_CLAUSE_VECTOR:
8232 tag |= OLF_DIM_VECTOR;
8233 levels++;
8234 break;
8236 case OMP_CLAUSE_SEQ:
8237 tag |= OLF_SEQ;
8238 break;
8240 case OMP_CLAUSE_AUTO:
8241 tag |= OLF_AUTO;
8242 break;
8244 case OMP_CLAUSE_INDEPENDENT:
8245 tag |= OLF_INDEPENDENT;
8246 break;
8248 case OMP_CLAUSE_TILE:
8249 tag |= OLF_TILE;
8250 break;
8252 default:
8253 continue;
8257 if (gang_static)
8259 if (DECL_P (gang_static))
8260 gang_static = build_outer_var_ref (gang_static, ctx);
8261 tag |= OLF_GANG_STATIC;
8264 omp_context *tgt = enclosing_target_ctx (ctx);
8265 if (!tgt || is_oacc_parallel_or_serial (tgt))
8267 else if (is_oacc_kernels (tgt))
8268 /* Not using this loops handling inside OpenACC 'kernels' regions. */
8269 gcc_unreachable ();
8270 else if (is_oacc_kernels_decomposed_part (tgt))
8272 else
8273 gcc_unreachable ();
8275 /* In a parallel region, loops are implicitly INDEPENDENT. */
8276 if (!tgt || is_oacc_parallel_or_serial (tgt))
8277 tag |= OLF_INDEPENDENT;
8279 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8280 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8281 if (tgt && is_oacc_kernels_decomposed_part (tgt))
8283 gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
8284 gcc_assert (!(tag & OLF_AUTO));
8287 if (tag & OLF_TILE)
8288 /* Tiling could use all 3 levels. */
8289 levels = 3;
8290 else
8292 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8293 Ensure at least one level, or 2 for possible auto
8294 partitioning */
8295 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
8296 << OLF_DIM_BASE) | OLF_SEQ));
8298 if (levels < 1u + maybe_auto)
8299 levels = 1u + maybe_auto;
8302 args.quick_push (build_int_cst (integer_type_node, levels));
8303 args.quick_push (build_int_cst (integer_type_node, tag));
8304 if (gang_static)
8305 args.quick_push (gang_static);
8307 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
8308 gimple_set_location (call, loc);
8309 gimple_set_lhs (call, ddvar);
8310 gimple_seq_add_stmt (seq, call);
8312 return levels;
8315 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
8316 partitioning level of the enclosed region. */
8318 static void
8319 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8320 tree tofollow, gimple_seq *seq)
8322 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8323 : IFN_UNIQUE_OACC_TAIL_MARK);
8324 tree marker = build_int_cst (integer_type_node, marker_kind);
8325 int nargs = 2 + (tofollow != NULL_TREE);
8326 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8327 marker, ddvar, tofollow);
8328 gimple_set_location (call, loc);
8329 gimple_set_lhs (call, ddvar);
8330 gimple_seq_add_stmt (seq, call);
8333 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8334 the loop clauses, from which we extract reductions. Initialize
8335 HEAD and TAIL. */
8337 static void
8338 lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
8339 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
8341 bool inner = false;
8342 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
8343 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
8345 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
8347 if (private_marker)
8349 gimple_set_location (private_marker, loc);
8350 gimple_call_set_lhs (private_marker, ddvar);
8351 gimple_call_set_arg (private_marker, 1, ddvar);
8354 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
8355 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
8357 gcc_assert (count);
8358 for (unsigned done = 1; count; count--, done++)
8360 gimple_seq fork_seq = NULL;
8361 gimple_seq join_seq = NULL;
8363 tree place = build_int_cst (integer_type_node, -1);
8364 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
8365 fork_kind, ddvar, place);
8366 gimple_set_location (fork, loc);
8367 gimple_set_lhs (fork, ddvar);
8369 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
8370 join_kind, ddvar, place);
8371 gimple_set_location (join, loc);
8372 gimple_set_lhs (join, ddvar);
8374 /* Mark the beginning of this level sequence. */
8375 if (inner)
8376 lower_oacc_loop_marker (loc, ddvar, true,
8377 build_int_cst (integer_type_node, count),
8378 &fork_seq);
8379 lower_oacc_loop_marker (loc, ddvar, false,
8380 build_int_cst (integer_type_node, done),
8381 &join_seq);
8383 lower_oacc_reductions (loc, clauses, place, inner,
8384 fork, (count == 1) ? private_marker : NULL,
8385 join, &fork_seq, &join_seq, ctx);
8387 /* Append this level to head. */
8388 gimple_seq_add_seq (head, fork_seq);
8389 /* Prepend it to tail. */
8390 gimple_seq_add_seq (&join_seq, *tail);
8391 *tail = join_seq;
8393 inner = true;
8396 /* Mark the end of the sequence. */
8397 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
8398 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
8401 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8402 catch handler and return it. This prevents programs from violating the
8403 structured block semantics with throws. */
8405 static gimple_seq
8406 maybe_catch_exception (gimple_seq body)
8408 gimple *g;
8409 tree decl;
8411 if (!flag_exceptions)
8412 return body;
8414 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8415 decl = lang_hooks.eh_protect_cleanup_actions ();
8416 else
8417 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8419 g = gimple_build_eh_must_not_throw (decl);
8420 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8421 GIMPLE_TRY_CATCH);
8423 return gimple_seq_alloc_with_stmt (g);
8427 /* Routines to lower OMP directives into OMP-GIMPLE. */
8429 /* If ctx is a worksharing context inside of a cancellable parallel
8430 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8431 and conditional branch to parallel's cancel_label to handle
8432 cancellation in the implicit barrier. */
8434 static void
8435 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8436 gimple_seq *body)
8438 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8439 if (gimple_omp_return_nowait_p (omp_return))
8440 return;
8441 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8442 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8443 && outer->cancellable)
8445 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8446 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8447 tree lhs = create_tmp_var (c_bool_type);
8448 gimple_omp_return_set_lhs (omp_return, lhs);
8449 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8450 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8451 fold_convert (c_bool_type,
8452 boolean_false_node),
8453 outer->cancel_label, fallthru_label);
8454 gimple_seq_add_stmt (body, g);
8455 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8457 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
8458 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
8459 return;
8462 /* Find the first task_reduction or reduction clause or return NULL
8463 if there are none. */
8465 static inline tree
8466 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8467 enum omp_clause_code ccode)
8469 while (1)
8471 clauses = omp_find_clause (clauses, ccode);
8472 if (clauses == NULL_TREE)
8473 return NULL_TREE;
8474 if (ccode != OMP_CLAUSE_REDUCTION
8475 || code == OMP_TASKLOOP
8476 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8477 return clauses;
8478 clauses = OMP_CLAUSE_CHAIN (clauses);
8482 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8483 gimple_seq *, gimple_seq *);
8485 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8486 CTX is the enclosing OMP context for the current statement. */
8488 static void
8489 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8491 tree block, control;
8492 gimple_stmt_iterator tgsi;
8493 gomp_sections *stmt;
8494 gimple *t;
8495 gbind *new_stmt, *bind;
8496 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8498 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
8500 push_gimplify_context ();
8502 dlist = NULL;
8503 ilist = NULL;
8505 tree rclauses
8506 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
8507 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
8508 tree rtmp = NULL_TREE;
8509 if (rclauses)
8511 tree type = build_pointer_type (pointer_sized_int_node);
8512 tree temp = create_tmp_var (type);
8513 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8514 OMP_CLAUSE_DECL (c) = temp;
8515 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
8516 gimple_omp_sections_set_clauses (stmt, c);
8517 lower_omp_task_reductions (ctx, OMP_SECTIONS,
8518 gimple_omp_sections_clauses (stmt),
8519 &ilist, &tred_dlist);
8520 rclauses = c;
8521 rtmp = make_ssa_name (type);
8522 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
8525 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
8526 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
8528 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
8529 &ilist, &dlist, ctx, NULL);
8531 control = create_tmp_var (unsigned_type_node, ".section");
8532 gimple_omp_sections_set_control (stmt, control);
8534 new_body = gimple_omp_body (stmt);
8535 gimple_omp_set_body (stmt, NULL);
8536 tgsi = gsi_start (new_body);
8537 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
8539 omp_context *sctx;
8540 gimple *sec_start;
8542 sec_start = gsi_stmt (tgsi);
8543 sctx = maybe_lookup_ctx (sec_start);
8544 gcc_assert (sctx);
8546 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8547 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8548 GSI_CONTINUE_LINKING);
8549 gimple_omp_set_body (sec_start, NULL);
8551 if (gsi_one_before_end_p (tgsi))
8553 gimple_seq l = NULL;
8554 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8555 &ilist, &l, &clist, ctx);
8556 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8557 gimple_omp_section_set_last (sec_start);
8560 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8561 GSI_CONTINUE_LINKING);
8564 block = make_node (BLOCK);
8565 bind = gimple_build_bind (NULL, new_body, block);
8567 olist = NULL;
8568 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8569 &clist, ctx);
8570 if (clist)
8572 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8573 gcall *g = gimple_build_call (fndecl, 0);
8574 gimple_seq_add_stmt (&olist, g);
8575 gimple_seq_add_seq (&olist, clist);
8576 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8577 g = gimple_build_call (fndecl, 0);
8578 gimple_seq_add_stmt (&olist, g);
8581 block = make_node (BLOCK);
8582 new_stmt = gimple_build_bind (NULL, NULL, block);
8583 gsi_replace (gsi_p, new_stmt, true);
8585 pop_gimplify_context (new_stmt);
8586 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8587 BLOCK_VARS (block) = gimple_bind_vars (bind);
8588 if (BLOCK_VARS (block))
8589 TREE_USED (block) = 1;
8591 new_body = NULL;
8592 gimple_seq_add_seq (&new_body, ilist);
8593 gimple_seq_add_stmt (&new_body, stmt);
8594 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8595 gimple_seq_add_stmt (&new_body, bind);
8597 t = gimple_build_omp_continue (control, control);
8598 gimple_seq_add_stmt (&new_body, t);
8600 gimple_seq_add_seq (&new_body, olist);
8601 if (ctx->cancellable)
8602 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8603 gimple_seq_add_seq (&new_body, dlist);
8605 new_body = maybe_catch_exception (new_body);
8607 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8608 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8609 t = gimple_build_omp_return (nowait);
8610 gimple_seq_add_stmt (&new_body, t);
8611 gimple_seq_add_seq (&new_body, tred_dlist);
8612 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
8614 if (rclauses)
8615 OMP_CLAUSE_DECL (rclauses) = rtmp;
8617 gimple_bind_set_body (new_stmt, new_body);
8621 /* A subroutine of lower_omp_single. Expand the simple form of
8622 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8624 if (GOMP_single_start ())
8625 BODY;
8626 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8628 FIXME. It may be better to delay expanding the logic of this until
8629 pass_expand_omp. The expanded logic may make the job more difficult
8630 to a synchronization analysis pass. */
8632 static void
8633 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8635 location_t loc = gimple_location (single_stmt);
8636 tree tlabel = create_artificial_label (loc);
8637 tree flabel = create_artificial_label (loc);
8638 gimple *call, *cond;
8639 tree lhs, decl;
8641 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8642 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8643 call = gimple_build_call (decl, 0);
8644 gimple_call_set_lhs (call, lhs);
8645 gimple_seq_add_stmt (pre_p, call);
8647 cond = gimple_build_cond (EQ_EXPR, lhs,
8648 fold_convert_loc (loc, TREE_TYPE (lhs),
8649 boolean_true_node),
8650 tlabel, flabel);
8651 gimple_seq_add_stmt (pre_p, cond);
8652 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8653 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8654 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8658 /* A subroutine of lower_omp_single. Expand the simple form of
8659 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8661 #pragma omp single copyprivate (a, b, c)
8663 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8666 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8668 BODY;
8669 copyout.a = a;
8670 copyout.b = b;
8671 copyout.c = c;
8672 GOMP_single_copy_end (&copyout);
8674 else
8676 a = copyout_p->a;
8677 b = copyout_p->b;
8678 c = copyout_p->c;
8680 GOMP_barrier ();
8683 FIXME. It may be better to delay expanding the logic of this until
8684 pass_expand_omp. The expanded logic may make the job more difficult
8685 to a synchronization analysis pass. */
8687 static void
8688 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8689 omp_context *ctx)
8691 tree ptr_type, t, l0, l1, l2, bfn_decl;
8692 gimple_seq copyin_seq;
8693 location_t loc = gimple_location (single_stmt);
8695 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8697 ptr_type = build_pointer_type (ctx->record_type);
8698 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8700 l0 = create_artificial_label (loc);
8701 l1 = create_artificial_label (loc);
8702 l2 = create_artificial_label (loc);
8704 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8705 t = build_call_expr_loc (loc, bfn_decl, 0);
8706 t = fold_convert_loc (loc, ptr_type, t);
8707 gimplify_assign (ctx->receiver_decl, t, pre_p);
8709 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8710 build_int_cst (ptr_type, 0));
8711 t = build3 (COND_EXPR, void_type_node, t,
8712 build_and_jump (&l0), build_and_jump (&l1));
8713 gimplify_and_add (t, pre_p);
8715 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8717 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8719 copyin_seq = NULL;
8720 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8721 &copyin_seq, ctx);
8723 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8724 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8725 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8726 gimplify_and_add (t, pre_p);
8728 t = build_and_jump (&l2);
8729 gimplify_and_add (t, pre_p);
8731 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8733 gimple_seq_add_seq (pre_p, copyin_seq);
8735 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8739 /* Expand code for an OpenMP single directive. */
8741 static void
8742 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8744 tree block;
8745 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8746 gbind *bind;
8747 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8749 push_gimplify_context ();
8751 block = make_node (BLOCK);
8752 bind = gimple_build_bind (NULL, NULL, block);
8753 gsi_replace (gsi_p, bind, true);
8754 bind_body = NULL;
8755 dlist = NULL;
8756 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8757 &bind_body, &dlist, ctx, NULL);
8758 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8760 gimple_seq_add_stmt (&bind_body, single_stmt);
8762 if (ctx->record_type)
8763 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8764 else
8765 lower_omp_single_simple (single_stmt, &bind_body);
8767 gimple_omp_set_body (single_stmt, NULL);
8769 gimple_seq_add_seq (&bind_body, dlist);
8771 bind_body = maybe_catch_exception (bind_body);
8773 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8774 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8775 gimple *g = gimple_build_omp_return (nowait);
8776 gimple_seq_add_stmt (&bind_body_tail, g);
8777 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8778 if (ctx->record_type)
8780 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8781 tree clobber = build_clobber (ctx->record_type);
8782 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8783 clobber), GSI_SAME_STMT);
8785 gimple_seq_add_seq (&bind_body, bind_body_tail);
8786 gimple_bind_set_body (bind, bind_body);
8788 pop_gimplify_context (bind);
8790 gimple_bind_append_vars (bind, ctx->block_vars);
8791 BLOCK_VARS (block) = ctx->block_vars;
8792 if (BLOCK_VARS (block))
8793 TREE_USED (block) = 1;
8797 /* Lower code for an OMP scope directive. */
8799 static void
8800 lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8802 tree block;
8803 gimple *scope_stmt = gsi_stmt (*gsi_p);
8804 gbind *bind;
8805 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8806 gimple_seq tred_dlist = NULL;
8808 push_gimplify_context ();
8810 block = make_node (BLOCK);
8811 bind = gimple_build_bind (NULL, NULL, block);
8812 gsi_replace (gsi_p, bind, true);
8813 bind_body = NULL;
8814 dlist = NULL;
8816 tree rclauses
8817 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
8818 OMP_SCOPE, OMP_CLAUSE_REDUCTION);
8819 if (rclauses)
8821 tree type = build_pointer_type (pointer_sized_int_node);
8822 tree temp = create_tmp_var (type);
8823 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8824 OMP_CLAUSE_DECL (c) = temp;
8825 OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
8826 gimple_omp_scope_set_clauses (scope_stmt, c);
8827 lower_omp_task_reductions (ctx, OMP_SCOPE,
8828 gimple_omp_scope_clauses (scope_stmt),
8829 &bind_body, &tred_dlist);
8830 rclauses = c;
8831 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
8832 gimple *stmt = gimple_build_call (fndecl, 1, temp);
8833 gimple_seq_add_stmt (&bind_body, stmt);
8836 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
8837 &bind_body, &dlist, ctx, NULL);
8838 lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);
8840 gimple_seq_add_stmt (&bind_body, scope_stmt);
8842 gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));
8844 gimple_omp_set_body (scope_stmt, NULL);
8846 gimple_seq clist = NULL;
8847 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
8848 &bind_body, &clist, ctx);
8849 if (clist)
8851 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8852 gcall *g = gimple_build_call (fndecl, 0);
8853 gimple_seq_add_stmt (&bind_body, g);
8854 gimple_seq_add_seq (&bind_body, clist);
8855 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8856 g = gimple_build_call (fndecl, 0);
8857 gimple_seq_add_stmt (&bind_body, g);
8860 gimple_seq_add_seq (&bind_body, dlist);
8862 bind_body = maybe_catch_exception (bind_body);
8864 bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
8865 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8866 gimple *g = gimple_build_omp_return (nowait);
8867 gimple_seq_add_stmt (&bind_body_tail, g);
8868 gimple_seq_add_seq (&bind_body_tail, tred_dlist);
8869 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8870 if (ctx->record_type)
8872 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8873 tree clobber = build_clobber (ctx->record_type);
8874 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8875 clobber), GSI_SAME_STMT);
8877 gimple_seq_add_seq (&bind_body, bind_body_tail);
8879 gimple_bind_set_body (bind, bind_body);
8881 pop_gimplify_context (bind);
8883 gimple_bind_append_vars (bind, ctx->block_vars);
8884 BLOCK_VARS (block) = ctx->block_vars;
8885 if (BLOCK_VARS (block))
8886 TREE_USED (block) = 1;
8888 /* Expand code for an OpenMP master or masked directive. */
8890 static void
8891 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8893 tree block, lab = NULL, x, bfn_decl;
8894 gimple *stmt = gsi_stmt (*gsi_p);
8895 gbind *bind;
8896 location_t loc = gimple_location (stmt);
8897 gimple_seq tseq;
8898 tree filter = integer_zero_node;
8900 push_gimplify_context ();
8902 if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
8904 filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
8905 OMP_CLAUSE_FILTER);
8906 if (filter)
8907 filter = fold_convert (integer_type_node,
8908 OMP_CLAUSE_FILTER_EXPR (filter));
8909 else
8910 filter = integer_zero_node;
8912 block = make_node (BLOCK);
8913 bind = gimple_build_bind (NULL, NULL, block);
8914 gsi_replace (gsi_p, bind, true);
8915 gimple_bind_add_stmt (bind, stmt);
8917 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8918 x = build_call_expr_loc (loc, bfn_decl, 0);
8919 x = build2 (EQ_EXPR, boolean_type_node, x, filter);
8920 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8921 tseq = NULL;
8922 gimplify_and_add (x, &tseq);
8923 gimple_bind_add_seq (bind, tseq);
8925 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8926 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8927 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8928 gimple_omp_set_body (stmt, NULL);
8930 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8932 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8934 pop_gimplify_context (bind);
8936 gimple_bind_append_vars (bind, ctx->block_vars);
8937 BLOCK_VARS (block) = ctx->block_vars;
8940 /* Helper function for lower_omp_task_reductions. For a specific PASS
8941 find out the current clause it should be processed, or return false
8942 if all have been processed already. */
8944 static inline bool
8945 omp_task_reduction_iterate (int pass, enum tree_code code,
8946 enum omp_clause_code ccode, tree *c, tree *decl,
8947 tree *type, tree *next)
8949 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8951 if (ccode == OMP_CLAUSE_REDUCTION
8952 && code != OMP_TASKLOOP
8953 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8954 continue;
8955 *decl = OMP_CLAUSE_DECL (*c);
8956 *type = TREE_TYPE (*decl);
8957 if (TREE_CODE (*decl) == MEM_REF)
8959 if (pass != 1)
8960 continue;
8962 else
8964 if (omp_privatize_by_reference (*decl))
8965 *type = TREE_TYPE (*type);
8966 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8967 continue;
8969 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8970 return true;
8972 *decl = NULL_TREE;
8973 *type = NULL_TREE;
8974 *next = NULL_TREE;
8975 return false;
8978 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8979 OMP_TASKGROUP only with task modifier). Register mapping of those in
8980 START sequence and reducing them and unregister them in the END sequence. */
8982 static void
8983 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8984 gimple_seq *start, gimple_seq *end)
8986 enum omp_clause_code ccode
8987 = (code == OMP_TASKGROUP
8988 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8989 tree cancellable = NULL_TREE;
8990 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8991 if (clauses == NULL_TREE)
8992 return;
8993 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
8995 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8996 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8997 && outer->cancellable)
8999 cancellable = error_mark_node;
9000 break;
9002 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
9003 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
9004 break;
9006 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
9007 tree *last = &TYPE_FIELDS (record_type);
9008 unsigned cnt = 0;
9009 if (cancellable)
9011 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9012 ptr_type_node);
9013 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9014 integer_type_node);
9015 *last = field;
9016 DECL_CHAIN (field) = ifield;
9017 last = &DECL_CHAIN (ifield);
9018 DECL_CONTEXT (field) = record_type;
9019 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9020 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9021 DECL_CONTEXT (ifield) = record_type;
9022 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
9023 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
9025 for (int pass = 0; pass < 2; pass++)
9027 tree decl, type, next;
9028 for (tree c = clauses;
9029 omp_task_reduction_iterate (pass, code, ccode,
9030 &c, &decl, &type, &next); c = next)
9032 ++cnt;
9033 tree new_type = type;
9034 if (ctx->outer)
9035 new_type = remap_type (type, &ctx->outer->cb);
9036 tree field
9037 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
9038 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
9039 new_type);
9040 if (DECL_P (decl) && type == TREE_TYPE (decl))
9042 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
9043 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
9044 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
9046 else
9047 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
9048 DECL_CONTEXT (field) = record_type;
9049 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9050 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9051 *last = field;
9052 last = &DECL_CHAIN (field);
9053 tree bfield
9054 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
9055 boolean_type_node);
9056 DECL_CONTEXT (bfield) = record_type;
9057 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
9058 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
9059 *last = bfield;
9060 last = &DECL_CHAIN (bfield);
9063 *last = NULL_TREE;
9064 layout_type (record_type);
9066 /* Build up an array which registers with the runtime all the reductions
9067 and deregisters them at the end. Format documented in libgomp/task.c. */
9068 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
9069 tree avar = create_tmp_var_raw (atype);
9070 gimple_add_tmp_var (avar);
9071 TREE_ADDRESSABLE (avar) = 1;
9072 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
9073 NULL_TREE, NULL_TREE);
9074 tree t = build_int_cst (pointer_sized_int_node, cnt);
9075 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9076 gimple_seq seq = NULL;
9077 tree sz = fold_convert (pointer_sized_int_node,
9078 TYPE_SIZE_UNIT (record_type));
9079 int cachesz = 64;
9080 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
9081 build_int_cst (pointer_sized_int_node, cachesz - 1));
9082 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
9083 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
9084 ctx->task_reductions.create (1 + cnt);
9085 ctx->task_reduction_map = new hash_map<tree, unsigned>;
9086 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
9087 ? sz : NULL_TREE);
9088 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
9089 gimple_seq_add_seq (start, seq);
9090 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
9091 NULL_TREE, NULL_TREE);
9092 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
9093 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9094 NULL_TREE, NULL_TREE);
9095 t = build_int_cst (pointer_sized_int_node,
9096 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
9097 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9098 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
9099 NULL_TREE, NULL_TREE);
9100 t = build_int_cst (pointer_sized_int_node, -1);
9101 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9102 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
9103 NULL_TREE, NULL_TREE);
9104 t = build_int_cst (pointer_sized_int_node, 0);
9105 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9107 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
9108 and for each task reduction checks a bool right after the private variable
9109 within that thread's chunk; if the bool is clear, it hasn't been
9110 initialized and thus isn't going to be reduced nor destructed, otherwise
9111 reduce and destruct it. */
9112 tree idx = create_tmp_var (size_type_node);
9113 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
9114 tree num_thr_sz = create_tmp_var (size_type_node);
9115 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9116 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9117 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
9118 gimple *g;
9119 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9121 /* For worksharing constructs or scope, only perform it in the master
9122 thread, with the exception of cancelled implicit barriers - then only
9123 handle the current thread. */
9124 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9125 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9126 tree thr_num = create_tmp_var (integer_type_node);
9127 g = gimple_build_call (t, 0);
9128 gimple_call_set_lhs (g, thr_num);
9129 gimple_seq_add_stmt (end, g);
9130 if (cancellable)
9132 tree c;
9133 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9134 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9135 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9136 if (code == OMP_FOR)
9137 c = gimple_omp_for_clauses (ctx->stmt);
9138 else if (code == OMP_SECTIONS)
9139 c = gimple_omp_sections_clauses (ctx->stmt);
9140 else /* if (code == OMP_SCOPE) */
9141 c = gimple_omp_scope_clauses (ctx->stmt);
9142 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
9143 cancellable = c;
9144 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
9145 lab5, lab6);
9146 gimple_seq_add_stmt (end, g);
9147 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9148 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
9149 gimple_seq_add_stmt (end, g);
9150 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
9151 build_one_cst (TREE_TYPE (idx)));
9152 gimple_seq_add_stmt (end, g);
9153 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
9154 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9156 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
9157 gimple_seq_add_stmt (end, g);
9158 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9160 if (code != OMP_PARALLEL)
9162 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9163 tree num_thr = create_tmp_var (integer_type_node);
9164 g = gimple_build_call (t, 0);
9165 gimple_call_set_lhs (g, num_thr);
9166 gimple_seq_add_stmt (end, g);
9167 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
9168 gimple_seq_add_stmt (end, g);
9169 if (cancellable)
9170 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9172 else
9174 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
9175 OMP_CLAUSE__REDUCTEMP_);
9176 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
9177 t = fold_convert (size_type_node, t);
9178 gimplify_assign (num_thr_sz, t, end);
9180 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9181 NULL_TREE, NULL_TREE);
9182 tree data = create_tmp_var (pointer_sized_int_node);
9183 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
9184 if (code == OMP_TASKLOOP)
9186 lab7 = create_artificial_label (UNKNOWN_LOCATION);
9187 g = gimple_build_cond (NE_EXPR, data,
9188 build_zero_cst (pointer_sized_int_node),
9189 lab1, lab7);
9190 gimple_seq_add_stmt (end, g);
9192 gimple_seq_add_stmt (end, gimple_build_label (lab1));
9193 tree ptr;
9194 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
9195 ptr = create_tmp_var (build_pointer_type (record_type));
9196 else
9197 ptr = create_tmp_var (ptr_type_node);
9198 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
9200 tree field = TYPE_FIELDS (record_type);
9201 cnt = 0;
9202 if (cancellable)
9203 field = DECL_CHAIN (DECL_CHAIN (field));
9204 for (int pass = 0; pass < 2; pass++)
9206 tree decl, type, next;
9207 for (tree c = clauses;
9208 omp_task_reduction_iterate (pass, code, ccode,
9209 &c, &decl, &type, &next); c = next)
9211 tree var = decl, ref;
9212 if (TREE_CODE (decl) == MEM_REF)
9214 var = TREE_OPERAND (var, 0);
9215 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
9216 var = TREE_OPERAND (var, 0);
9217 tree v = var;
9218 if (TREE_CODE (var) == ADDR_EXPR)
9219 var = TREE_OPERAND (var, 0);
9220 else if (TREE_CODE (var) == INDIRECT_REF)
9221 var = TREE_OPERAND (var, 0);
9222 tree orig_var = var;
9223 if (is_variable_sized (var))
9225 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
9226 var = DECL_VALUE_EXPR (var);
9227 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
9228 var = TREE_OPERAND (var, 0);
9229 gcc_assert (DECL_P (var));
9231 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9232 if (orig_var != var)
9233 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
9234 else if (TREE_CODE (v) == ADDR_EXPR)
9235 t = build_fold_addr_expr (t);
9236 else if (TREE_CODE (v) == INDIRECT_REF)
9237 t = build_fold_indirect_ref (t);
9238 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
9240 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
9241 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
9242 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
9244 if (!integer_zerop (TREE_OPERAND (decl, 1)))
9245 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
9246 fold_convert (size_type_node,
9247 TREE_OPERAND (decl, 1)));
9249 else
9251 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9252 if (!omp_privatize_by_reference (decl))
9253 t = build_fold_addr_expr (t);
9255 t = fold_convert (pointer_sized_int_node, t);
9256 seq = NULL;
9257 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9258 gimple_seq_add_seq (start, seq);
9259 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9260 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9261 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9262 t = unshare_expr (byte_position (field));
9263 t = fold_convert (pointer_sized_int_node, t);
9264 ctx->task_reduction_map->put (c, cnt);
9265 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
9266 ? t : NULL_TREE);
9267 seq = NULL;
9268 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9269 gimple_seq_add_seq (start, seq);
9270 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9271 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
9272 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9274 tree bfield = DECL_CHAIN (field);
9275 tree cond;
9276 if (code == OMP_PARALLEL
9277 || code == OMP_FOR
9278 || code == OMP_SECTIONS
9279 || code == OMP_SCOPE)
9280 /* In parallel, worksharing or scope all threads unconditionally
9281 initialize all their task reduction private variables. */
9282 cond = boolean_true_node;
9283 else if (TREE_TYPE (ptr) == ptr_type_node)
9285 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9286 unshare_expr (byte_position (bfield)));
9287 seq = NULL;
9288 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
9289 gimple_seq_add_seq (end, seq);
9290 tree pbool = build_pointer_type (TREE_TYPE (bfield));
9291 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
9292 build_int_cst (pbool, 0));
9294 else
9295 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
9296 build_simple_mem_ref (ptr), bfield, NULL_TREE);
9297 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9298 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9299 tree condv = create_tmp_var (boolean_type_node);
9300 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
9301 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
9302 lab3, lab4);
9303 gimple_seq_add_stmt (end, g);
9304 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9305 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
9307 /* If this reduction doesn't need destruction and parallel
9308 has been cancelled, there is nothing to do for this
9309 reduction, so jump around the merge operation. */
9310 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9311 g = gimple_build_cond (NE_EXPR, cancellable,
9312 build_zero_cst (TREE_TYPE (cancellable)),
9313 lab4, lab5);
9314 gimple_seq_add_stmt (end, g);
9315 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9318 tree new_var;
9319 if (TREE_TYPE (ptr) == ptr_type_node)
9321 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9322 unshare_expr (byte_position (field)));
9323 seq = NULL;
9324 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9325 gimple_seq_add_seq (end, seq);
9326 tree pbool = build_pointer_type (TREE_TYPE (field));
9327 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9328 build_int_cst (pbool, 0));
9330 else
9331 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9332 build_simple_mem_ref (ptr), field, NULL_TREE);
9334 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9335 if (TREE_CODE (decl) != MEM_REF
9336 && omp_privatize_by_reference (decl))
9337 ref = build_simple_mem_ref (ref);
9338 /* reduction(-:var) sums up the partial results, so it acts
9339 identically to reduction(+:var). */
9340 if (rcode == MINUS_EXPR)
9341 rcode = PLUS_EXPR;
9342 if (TREE_CODE (decl) == MEM_REF)
9344 tree type = TREE_TYPE (new_var);
9345 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9346 tree i = create_tmp_var (TREE_TYPE (v));
9347 tree ptype = build_pointer_type (TREE_TYPE (type));
9348 if (DECL_P (v))
9350 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9351 tree vv = create_tmp_var (TREE_TYPE (v));
9352 gimplify_assign (vv, v, start);
9353 v = vv;
9355 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9356 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9357 new_var = build_fold_addr_expr (new_var);
9358 new_var = fold_convert (ptype, new_var);
9359 ref = fold_convert (ptype, ref);
9360 tree m = create_tmp_var (ptype);
9361 gimplify_assign (m, new_var, end);
9362 new_var = m;
9363 m = create_tmp_var (ptype);
9364 gimplify_assign (m, ref, end);
9365 ref = m;
9366 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9367 tree body = create_artificial_label (UNKNOWN_LOCATION);
9368 tree endl = create_artificial_label (UNKNOWN_LOCATION);
9369 gimple_seq_add_stmt (end, gimple_build_label (body));
9370 tree priv = build_simple_mem_ref (new_var);
9371 tree out = build_simple_mem_ref (ref);
9372 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9374 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9375 tree decl_placeholder
9376 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9377 tree lab6 = NULL_TREE;
9378 if (cancellable)
9380 /* If this reduction needs destruction and parallel
9381 has been cancelled, jump around the merge operation
9382 to the destruction. */
9383 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9384 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9385 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9386 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9387 lab6, lab5);
9388 gimple_seq_add_stmt (end, g);
9389 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9391 SET_DECL_VALUE_EXPR (placeholder, out);
9392 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9393 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9394 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9395 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9396 gimple_seq_add_seq (end,
9397 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9398 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9399 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9401 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9402 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9404 if (cancellable)
9405 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9406 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9407 if (x)
9409 gimple_seq tseq = NULL;
9410 gimplify_stmt (&x, &tseq);
9411 gimple_seq_add_seq (end, tseq);
9414 else
9416 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9417 out = unshare_expr (out);
9418 gimplify_assign (out, x, end);
9420 gimple *g
9421 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9422 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9423 gimple_seq_add_stmt (end, g);
9424 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9425 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9426 gimple_seq_add_stmt (end, g);
9427 g = gimple_build_assign (i, PLUS_EXPR, i,
9428 build_int_cst (TREE_TYPE (i), 1));
9429 gimple_seq_add_stmt (end, g);
9430 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9431 gimple_seq_add_stmt (end, g);
9432 gimple_seq_add_stmt (end, gimple_build_label (endl));
9434 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9436 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9437 tree oldv = NULL_TREE;
9438 tree lab6 = NULL_TREE;
9439 if (cancellable)
9441 /* If this reduction needs destruction and parallel
9442 has been cancelled, jump around the merge operation
9443 to the destruction. */
9444 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9445 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9446 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9447 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9448 lab6, lab5);
9449 gimple_seq_add_stmt (end, g);
9450 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9452 if (omp_privatize_by_reference (decl)
9453 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9454 TREE_TYPE (ref)))
9455 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9456 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9457 tree refv = create_tmp_var (TREE_TYPE (ref));
9458 gimplify_assign (refv, ref, end);
9459 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9460 SET_DECL_VALUE_EXPR (placeholder, ref);
9461 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9462 tree d = maybe_lookup_decl (decl, ctx);
9463 gcc_assert (d);
9464 if (DECL_HAS_VALUE_EXPR_P (d))
9465 oldv = DECL_VALUE_EXPR (d);
9466 if (omp_privatize_by_reference (var))
9468 tree v = fold_convert (TREE_TYPE (d),
9469 build_fold_addr_expr (new_var));
9470 SET_DECL_VALUE_EXPR (d, v);
9472 else
9473 SET_DECL_VALUE_EXPR (d, new_var);
9474 DECL_HAS_VALUE_EXPR_P (d) = 1;
9475 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9476 if (oldv)
9477 SET_DECL_VALUE_EXPR (d, oldv);
9478 else
9480 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9481 DECL_HAS_VALUE_EXPR_P (d) = 0;
9483 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9484 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9485 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9486 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9487 if (cancellable)
9488 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9489 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9490 if (x)
9492 gimple_seq tseq = NULL;
9493 gimplify_stmt (&x, &tseq);
9494 gimple_seq_add_seq (end, tseq);
9497 else
9499 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9500 ref = unshare_expr (ref);
9501 gimplify_assign (ref, x, end);
9503 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9504 ++cnt;
9505 field = DECL_CHAIN (bfield);
9509 if (code == OMP_TASKGROUP)
9511 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9512 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9513 gimple_seq_add_stmt (start, g);
9515 else
9517 tree c;
9518 if (code == OMP_FOR)
9519 c = gimple_omp_for_clauses (ctx->stmt);
9520 else if (code == OMP_SECTIONS)
9521 c = gimple_omp_sections_clauses (ctx->stmt);
9522 else if (code == OMP_SCOPE)
9523 c = gimple_omp_scope_clauses (ctx->stmt);
9524 else
9525 c = gimple_omp_taskreg_clauses (ctx->stmt);
9526 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9527 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9528 build_fold_addr_expr (avar));
9529 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9532 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9533 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9534 size_one_node));
9535 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9536 gimple_seq_add_stmt (end, g);
9537 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9538 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9540 enum built_in_function bfn
9541 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9542 t = builtin_decl_explicit (bfn);
9543 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9544 tree arg;
9545 if (cancellable)
9547 arg = create_tmp_var (c_bool_type);
9548 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9549 cancellable));
9551 else
9552 arg = build_int_cst (c_bool_type, 0);
9553 g = gimple_build_call (t, 1, arg);
9555 else
9557 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9558 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9560 gimple_seq_add_stmt (end, g);
9561 if (lab7)
9562 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9563 t = build_constructor (atype, NULL);
9564 TREE_THIS_VOLATILE (t) = 1;
9565 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9568 /* Expand code for an OpenMP taskgroup directive. */
9570 static void
9571 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9573 gimple *stmt = gsi_stmt (*gsi_p);
9574 gcall *x;
9575 gbind *bind;
9576 gimple_seq dseq = NULL;
9577 tree block = make_node (BLOCK);
9579 bind = gimple_build_bind (NULL, NULL, block);
9580 gsi_replace (gsi_p, bind, true);
9581 gimple_bind_add_stmt (bind, stmt);
9583 push_gimplify_context ();
9585 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9587 gimple_bind_add_stmt (bind, x);
9589 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9590 gimple_omp_taskgroup_clauses (stmt),
9591 gimple_bind_body_ptr (bind), &dseq);
9593 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9594 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9595 gimple_omp_set_body (stmt, NULL);
9597 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9598 gimple_bind_add_seq (bind, dseq);
9600 pop_gimplify_context (bind);
9602 gimple_bind_append_vars (bind, ctx->block_vars);
9603 BLOCK_VARS (block) = ctx->block_vars;
9607 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9609 static void
9610 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9611 omp_context *ctx)
9613 struct omp_for_data fd;
9614 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9615 return;
9617 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9618 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9619 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9620 if (!fd.ordered)
9621 return;
9623 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9624 tree c = gimple_omp_ordered_clauses (ord_stmt);
9625 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9626 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9628 /* Merge depend clauses from multiple adjacent
9629 #pragma omp ordered depend(sink:...) constructs
9630 into one #pragma omp ordered depend(sink:...), so that
9631 we can optimize them together. */
9632 gimple_stmt_iterator gsi = *gsi_p;
9633 gsi_next (&gsi);
9634 while (!gsi_end_p (gsi))
9636 gimple *stmt = gsi_stmt (gsi);
9637 if (is_gimple_debug (stmt)
9638 || gimple_code (stmt) == GIMPLE_NOP)
9640 gsi_next (&gsi);
9641 continue;
9643 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9644 break;
9645 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9646 c = gimple_omp_ordered_clauses (ord_stmt2);
9647 if (c == NULL_TREE
9648 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
9649 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9650 break;
9651 while (*list_p)
9652 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9653 *list_p = c;
9654 gsi_remove (&gsi, true);
9658 /* Canonicalize sink dependence clauses into one folded clause if
9659 possible.
9661 The basic algorithm is to create a sink vector whose first
9662 element is the GCD of all the first elements, and whose remaining
9663 elements are the minimum of the subsequent columns.
9665 We ignore dependence vectors whose first element is zero because
9666 such dependencies are known to be executed by the same thread.
9668 We take into account the direction of the loop, so a minimum
9669 becomes a maximum if the loop is iterating forwards. We also
9670 ignore sink clauses where the loop direction is unknown, or where
9671 the offsets are clearly invalid because they are not a multiple
9672 of the loop increment.
9674 For example:
9676 #pragma omp for ordered(2)
9677 for (i=0; i < N; ++i)
9678 for (j=0; j < M; ++j)
9680 #pragma omp ordered \
9681 depend(sink:i-8,j-2) \
9682 depend(sink:i,j-1) \ // Completely ignored because i+0.
9683 depend(sink:i-4,j-3) \
9684 depend(sink:i-6,j-4)
9685 #pragma omp ordered depend(source)
9688 Folded clause is:
9690 depend(sink:-gcd(8,4,6),-min(2,3,4))
9691 -or-
9692 depend(sink:-2,-2)
9695 /* FIXME: Computing GCD's where the first element is zero is
9696 non-trivial in the presence of collapsed loops. Do this later. */
9697 if (fd.collapse > 1)
9698 return;
9700 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9702 /* wide_int is not a POD so it must be default-constructed. */
9703 for (unsigned i = 0; i != 2 * len - 1; ++i)
9704 new (static_cast<void*>(folded_deps + i)) wide_int ();
9706 tree folded_dep = NULL_TREE;
9707 /* TRUE if the first dimension's offset is negative. */
9708 bool neg_offset_p = false;
9710 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9711 unsigned int i;
9712 while ((c = *list_p) != NULL)
9714 bool remove = false;
9716 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
9717 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9718 goto next_ordered_clause;
9720 tree vec;
9721 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9722 vec && TREE_CODE (vec) == TREE_LIST;
9723 vec = TREE_CHAIN (vec), ++i)
9725 gcc_assert (i < len);
9727 /* omp_extract_for_data has canonicalized the condition. */
9728 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9729 || fd.loops[i].cond_code == GT_EXPR);
9730 bool forward = fd.loops[i].cond_code == LT_EXPR;
9731 bool maybe_lexically_later = true;
9733 /* While the committee makes up its mind, bail if we have any
9734 non-constant steps. */
9735 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9736 goto lower_omp_ordered_ret;
9738 tree itype = TREE_TYPE (TREE_VALUE (vec));
9739 if (POINTER_TYPE_P (itype))
9740 itype = sizetype;
9741 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9742 TYPE_PRECISION (itype),
9743 TYPE_SIGN (itype));
9745 /* Ignore invalid offsets that are not multiples of the step. */
9746 if (!wi::multiple_of_p (wi::abs (offset),
9747 wi::abs (wi::to_wide (fd.loops[i].step)),
9748 UNSIGNED))
9750 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9751 "ignoring sink clause with offset that is not "
9752 "a multiple of the loop step");
9753 remove = true;
9754 goto next_ordered_clause;
9757 /* Calculate the first dimension. The first dimension of
9758 the folded dependency vector is the GCD of the first
9759 elements, while ignoring any first elements whose offset
9760 is 0. */
9761 if (i == 0)
9763 /* Ignore dependence vectors whose first dimension is 0. */
9764 if (offset == 0)
9766 remove = true;
9767 goto next_ordered_clause;
9769 else
9771 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9773 error_at (OMP_CLAUSE_LOCATION (c),
9774 "first offset must be in opposite direction "
9775 "of loop iterations");
9776 goto lower_omp_ordered_ret;
9778 if (forward)
9779 offset = -offset;
9780 neg_offset_p = forward;
9781 /* Initialize the first time around. */
9782 if (folded_dep == NULL_TREE)
9784 folded_dep = c;
9785 folded_deps[0] = offset;
9787 else
9788 folded_deps[0] = wi::gcd (folded_deps[0],
9789 offset, UNSIGNED);
9792 /* Calculate minimum for the remaining dimensions. */
9793 else
9795 folded_deps[len + i - 1] = offset;
9796 if (folded_dep == c)
9797 folded_deps[i] = offset;
9798 else if (maybe_lexically_later
9799 && !wi::eq_p (folded_deps[i], offset))
9801 if (forward ^ wi::gts_p (folded_deps[i], offset))
9803 unsigned int j;
9804 folded_dep = c;
9805 for (j = 1; j <= i; j++)
9806 folded_deps[j] = folded_deps[len + j - 1];
9808 else
9809 maybe_lexically_later = false;
9813 gcc_assert (i == len);
9815 remove = true;
9817 next_ordered_clause:
9818 if (remove)
9819 *list_p = OMP_CLAUSE_CHAIN (c);
9820 else
9821 list_p = &OMP_CLAUSE_CHAIN (c);
9824 if (folded_dep)
9826 if (neg_offset_p)
9827 folded_deps[0] = -folded_deps[0];
9829 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9830 if (POINTER_TYPE_P (itype))
9831 itype = sizetype;
9833 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9834 = wide_int_to_tree (itype, folded_deps[0]);
9835 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9836 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9839 lower_omp_ordered_ret:
9841 /* Ordered without clauses is #pragma omp threads, while we want
9842 a nop instead if we remove all clauses. */
9843 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9844 gsi_replace (gsi_p, gimple_build_nop (), true);
9848 /* Expand code for an OpenMP ordered directive. */
9850 static void
9851 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9853 tree block;
9854 gimple *stmt = gsi_stmt (*gsi_p), *g;
9855 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9856 gcall *x;
9857 gbind *bind;
9858 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9859 OMP_CLAUSE_SIMD);
9860 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9861 loop. */
9862 bool maybe_simt
9863 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9864 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9865 OMP_CLAUSE_THREADS);
9867 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9868 OMP_CLAUSE_DEPEND))
9870 /* FIXME: This is needs to be moved to the expansion to verify various
9871 conditions only testable on cfg with dominators computed, and also
9872 all the depend clauses to be merged still might need to be available
9873 for the runtime checks. */
9874 if (0)
9875 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9876 return;
9879 push_gimplify_context ();
9881 block = make_node (BLOCK);
9882 bind = gimple_build_bind (NULL, NULL, block);
9883 gsi_replace (gsi_p, bind, true);
9884 gimple_bind_add_stmt (bind, stmt);
9886 if (simd)
9888 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9889 build_int_cst (NULL_TREE, threads));
9890 cfun->has_simduid_loops = true;
9892 else
9893 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9895 gimple_bind_add_stmt (bind, x);
9897 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9898 if (maybe_simt)
9900 counter = create_tmp_var (integer_type_node);
9901 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9902 gimple_call_set_lhs (g, counter);
9903 gimple_bind_add_stmt (bind, g);
9905 body = create_artificial_label (UNKNOWN_LOCATION);
9906 test = create_artificial_label (UNKNOWN_LOCATION);
9907 gimple_bind_add_stmt (bind, gimple_build_label (body));
9909 tree simt_pred = create_tmp_var (integer_type_node);
9910 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9911 gimple_call_set_lhs (g, simt_pred);
9912 gimple_bind_add_stmt (bind, g);
9914 tree t = create_artificial_label (UNKNOWN_LOCATION);
9915 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9916 gimple_bind_add_stmt (bind, g);
9918 gimple_bind_add_stmt (bind, gimple_build_label (t));
9920 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9921 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9922 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9923 gimple_omp_set_body (stmt, NULL);
9925 if (maybe_simt)
9927 gimple_bind_add_stmt (bind, gimple_build_label (test));
9928 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9929 gimple_bind_add_stmt (bind, g);
9931 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9932 tree nonneg = create_tmp_var (integer_type_node);
9933 gimple_seq tseq = NULL;
9934 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9935 gimple_bind_add_seq (bind, tseq);
9937 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9938 gimple_call_set_lhs (g, nonneg);
9939 gimple_bind_add_stmt (bind, g);
9941 tree end = create_artificial_label (UNKNOWN_LOCATION);
9942 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9943 gimple_bind_add_stmt (bind, g);
9945 gimple_bind_add_stmt (bind, gimple_build_label (end));
9947 if (simd)
9948 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9949 build_int_cst (NULL_TREE, threads));
9950 else
9951 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9953 gimple_bind_add_stmt (bind, x);
9955 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9957 pop_gimplify_context (bind);
9959 gimple_bind_append_vars (bind, ctx->block_vars);
9960 BLOCK_VARS (block) = gimple_bind_vars (bind);
9964 /* Expand code for an OpenMP scan directive and the structured block
9965 before the scan directive. */
9967 static void
9968 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9970 gimple *stmt = gsi_stmt (*gsi_p);
9971 bool has_clauses
9972 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9973 tree lane = NULL_TREE;
9974 gimple_seq before = NULL;
9975 omp_context *octx = ctx->outer;
9976 gcc_assert (octx);
9977 if (octx->scan_exclusive && !has_clauses)
9979 gimple_stmt_iterator gsi2 = *gsi_p;
9980 gsi_next (&gsi2);
9981 gimple *stmt2 = gsi_stmt (gsi2);
9982 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9983 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9984 the one with exclusive clause(s), comes first. */
9985 if (stmt2
9986 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9987 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9989 gsi_remove (gsi_p, false);
9990 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
9991 ctx = maybe_lookup_ctx (stmt2);
9992 gcc_assert (ctx);
9993 lower_omp_scan (gsi_p, ctx);
9994 return;
9998 bool input_phase = has_clauses ^ octx->scan_inclusive;
9999 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10000 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
10001 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10002 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
10003 && !gimple_omp_for_combined_p (octx->stmt));
10004 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
10005 if (is_for_simd && octx->for_simd_scan_phase)
10006 is_simd = false;
10007 if (is_simd)
10008 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
10009 OMP_CLAUSE__SIMDUID_))
10011 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
10012 lane = create_tmp_var (unsigned_type_node);
10013 tree t = build_int_cst (integer_type_node,
10014 input_phase ? 1
10015 : octx->scan_inclusive ? 2 : 3);
10016 gimple *g
10017 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
10018 gimple_call_set_lhs (g, lane);
10019 gimple_seq_add_stmt (&before, g);
10022 if (is_simd || is_for)
10024 for (tree c = gimple_omp_for_clauses (octx->stmt);
10025 c; c = OMP_CLAUSE_CHAIN (c))
10026 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10027 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10029 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10030 tree var = OMP_CLAUSE_DECL (c);
10031 tree new_var = lookup_decl (var, octx);
10032 tree val = new_var;
10033 tree var2 = NULL_TREE;
10034 tree var3 = NULL_TREE;
10035 tree var4 = NULL_TREE;
10036 tree lane0 = NULL_TREE;
10037 tree new_vard = new_var;
10038 if (omp_privatize_by_reference (var))
10040 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10041 val = new_var;
10043 if (DECL_HAS_VALUE_EXPR_P (new_vard))
10045 val = DECL_VALUE_EXPR (new_vard);
10046 if (new_vard != new_var)
10048 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
10049 val = TREE_OPERAND (val, 0);
10051 if (TREE_CODE (val) == ARRAY_REF
10052 && VAR_P (TREE_OPERAND (val, 0)))
10054 tree v = TREE_OPERAND (val, 0);
10055 if (lookup_attribute ("omp simd array",
10056 DECL_ATTRIBUTES (v)))
10058 val = unshare_expr (val);
10059 lane0 = TREE_OPERAND (val, 1);
10060 TREE_OPERAND (val, 1) = lane;
10061 var2 = lookup_decl (v, octx);
10062 if (octx->scan_exclusive)
10063 var4 = lookup_decl (var2, octx);
10064 if (input_phase
10065 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10066 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
10067 if (!input_phase)
10069 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
10070 var2, lane, NULL_TREE, NULL_TREE);
10071 TREE_THIS_NOTRAP (var2) = 1;
10072 if (octx->scan_exclusive)
10074 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
10075 var4, lane, NULL_TREE,
10076 NULL_TREE);
10077 TREE_THIS_NOTRAP (var4) = 1;
10080 else
10081 var2 = val;
10084 gcc_assert (var2);
10086 else
10088 var2 = build_outer_var_ref (var, octx);
10089 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10091 var3 = maybe_lookup_decl (new_vard, octx);
10092 if (var3 == new_vard || var3 == NULL_TREE)
10093 var3 = NULL_TREE;
10094 else if (is_simd && octx->scan_exclusive && !input_phase)
10096 var4 = maybe_lookup_decl (var3, octx);
10097 if (var4 == var3 || var4 == NULL_TREE)
10099 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
10101 var4 = var3;
10102 var3 = NULL_TREE;
10104 else
10105 var4 = NULL_TREE;
10109 if (is_simd
10110 && octx->scan_exclusive
10111 && !input_phase
10112 && var4 == NULL_TREE)
10113 var4 = create_tmp_var (TREE_TYPE (val));
10115 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10117 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10118 if (input_phase)
10120 if (var3)
10122 /* If we've added a separate identity element
10123 variable, copy it over into val. */
10124 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
10125 var3);
10126 gimplify_and_add (x, &before);
10128 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10130 /* Otherwise, assign to it the identity element. */
10131 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10132 if (is_for)
10133 tseq = copy_gimple_seq_and_replace_locals (tseq);
10134 tree ref = build_outer_var_ref (var, octx);
10135 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10136 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10137 if (x)
10139 if (new_vard != new_var)
10140 val = build_fold_addr_expr_loc (clause_loc, val);
10141 SET_DECL_VALUE_EXPR (new_vard, val);
10143 SET_DECL_VALUE_EXPR (placeholder, ref);
10144 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10145 lower_omp (&tseq, octx);
10146 if (x)
10147 SET_DECL_VALUE_EXPR (new_vard, x);
10148 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10149 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10150 gimple_seq_add_seq (&before, tseq);
10151 if (is_simd)
10152 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10155 else if (is_simd)
10157 tree x;
10158 if (octx->scan_exclusive)
10160 tree v4 = unshare_expr (var4);
10161 tree v2 = unshare_expr (var2);
10162 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
10163 gimplify_and_add (x, &before);
10165 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10166 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10167 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10168 tree vexpr = val;
10169 if (x && new_vard != new_var)
10170 vexpr = build_fold_addr_expr_loc (clause_loc, val);
10171 if (x)
10172 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10173 SET_DECL_VALUE_EXPR (placeholder, var2);
10174 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10175 lower_omp (&tseq, octx);
10176 gimple_seq_add_seq (&before, tseq);
10177 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10178 if (x)
10179 SET_DECL_VALUE_EXPR (new_vard, x);
10180 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10181 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10182 if (octx->scan_inclusive)
10184 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10185 var2);
10186 gimplify_and_add (x, &before);
10188 else if (lane0 == NULL_TREE)
10190 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10191 var4);
10192 gimplify_and_add (x, &before);
10196 else
10198 if (input_phase)
10200 /* input phase. Set val to initializer before
10201 the body. */
10202 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
10203 gimplify_assign (val, x, &before);
10205 else if (is_simd)
10207 /* scan phase. */
10208 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10209 if (code == MINUS_EXPR)
10210 code = PLUS_EXPR;
10212 tree x = build2 (code, TREE_TYPE (var2),
10213 unshare_expr (var2), unshare_expr (val));
10214 if (octx->scan_inclusive)
10216 gimplify_assign (unshare_expr (var2), x, &before);
10217 gimplify_assign (val, var2, &before);
10219 else
10221 gimplify_assign (unshare_expr (var4),
10222 unshare_expr (var2), &before);
10223 gimplify_assign (var2, x, &before);
10224 if (lane0 == NULL_TREE)
10225 gimplify_assign (val, var4, &before);
10229 if (octx->scan_exclusive && !input_phase && lane0)
10231 tree vexpr = unshare_expr (var4);
10232 TREE_OPERAND (vexpr, 1) = lane0;
10233 if (new_vard != new_var)
10234 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
10235 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10239 if (is_simd && !is_for_simd)
10241 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
10242 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
10243 gsi_replace (gsi_p, gimple_build_nop (), true);
10244 return;
10246 lower_omp (gimple_omp_body_ptr (stmt), octx);
10247 if (before)
10249 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
10250 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
10255 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10256 substitution of a couple of function calls. But in the NAMED case,
10257 requires that languages coordinate a symbol name. It is therefore
10258 best put here in common code. */
10260 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
10262 static void
10263 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10265 tree block;
10266 tree name, lock, unlock;
10267 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10268 gbind *bind;
10269 location_t loc = gimple_location (stmt);
10270 gimple_seq tbody;
10272 name = gimple_omp_critical_name (stmt);
10273 if (name)
10275 tree decl;
10277 if (!critical_name_mutexes)
10278 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10280 tree *n = critical_name_mutexes->get (name);
10281 if (n == NULL)
10283 char *new_str;
10285 decl = create_tmp_var_raw (ptr_type_node);
10287 new_str = ACONCAT ((".gomp_critical_user_",
10288 IDENTIFIER_POINTER (name), NULL));
10289 DECL_NAME (decl) = get_identifier (new_str);
10290 TREE_PUBLIC (decl) = 1;
10291 TREE_STATIC (decl) = 1;
10292 DECL_COMMON (decl) = 1;
10293 DECL_ARTIFICIAL (decl) = 1;
10294 DECL_IGNORED_P (decl) = 1;
10296 varpool_node::finalize_decl (decl);
10298 critical_name_mutexes->put (name, decl);
10300 else
10301 decl = *n;
10303 /* If '#pragma omp critical' is inside offloaded region or
10304 inside function marked as offloadable, the symbol must be
10305 marked as offloadable too. */
10306 omp_context *octx;
10307 if (cgraph_node::get (current_function_decl)->offloadable)
10308 varpool_node::get_create (decl)->offloadable = 1;
10309 else
10310 for (octx = ctx->outer; octx; octx = octx->outer)
10311 if (is_gimple_omp_offloaded (octx->stmt))
10313 varpool_node::get_create (decl)->offloadable = 1;
10314 break;
10317 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10318 lock = build_call_expr_loc (loc, lock, 1,
10319 build_fold_addr_expr_loc (loc, decl));
10321 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10322 unlock = build_call_expr_loc (loc, unlock, 1,
10323 build_fold_addr_expr_loc (loc, decl));
10325 else
10327 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10328 lock = build_call_expr_loc (loc, lock, 0);
10330 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10331 unlock = build_call_expr_loc (loc, unlock, 0);
10334 push_gimplify_context ();
10336 block = make_node (BLOCK);
10337 bind = gimple_build_bind (NULL, NULL, block);
10338 gsi_replace (gsi_p, bind, true);
10339 gimple_bind_add_stmt (bind, stmt);
10341 tbody = gimple_bind_body (bind);
10342 gimplify_and_add (lock, &tbody);
10343 gimple_bind_set_body (bind, tbody);
10345 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10346 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10347 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10348 gimple_omp_set_body (stmt, NULL);
10350 tbody = gimple_bind_body (bind);
10351 gimplify_and_add (unlock, &tbody);
10352 gimple_bind_set_body (bind, tbody);
10354 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10356 pop_gimplify_context (bind);
10357 gimple_bind_append_vars (bind, ctx->block_vars);
10358 BLOCK_VARS (block) = gimple_bind_vars (bind);
10361 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10362 for a lastprivate clause. Given a loop control predicate of (V
10363 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10364 is appended to *DLIST, iterator initialization is appended to
10365 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10366 to be emitted in a critical section. */
10368 static void
10369 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10370 gimple_seq *dlist, gimple_seq *clist,
10371 struct omp_context *ctx)
10373 tree clauses, cond, vinit;
10374 enum tree_code cond_code;
10375 gimple_seq stmts;
10377 cond_code = fd->loop.cond_code;
10378 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10380 /* When possible, use a strict equality expression. This can let VRP
10381 type optimizations deduce the value and remove a copy. */
10382 if (tree_fits_shwi_p (fd->loop.step))
10384 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10385 if (step == 1 || step == -1)
10386 cond_code = EQ_EXPR;
10389 tree n2 = fd->loop.n2;
10390 if (fd->collapse > 1
10391 && TREE_CODE (n2) != INTEGER_CST
10392 && gimple_omp_for_combined_into_p (fd->for_stmt))
10394 struct omp_context *taskreg_ctx = NULL;
10395 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10397 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10398 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10399 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10401 if (gimple_omp_for_combined_into_p (gfor))
10403 gcc_assert (ctx->outer->outer
10404 && is_parallel_ctx (ctx->outer->outer));
10405 taskreg_ctx = ctx->outer->outer;
10407 else
10409 struct omp_for_data outer_fd;
10410 omp_extract_for_data (gfor, &outer_fd, NULL);
10411 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10414 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10415 taskreg_ctx = ctx->outer->outer;
10417 else if (is_taskreg_ctx (ctx->outer))
10418 taskreg_ctx = ctx->outer;
10419 if (taskreg_ctx)
10421 int i;
10422 tree taskreg_clauses
10423 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10424 tree innerc = omp_find_clause (taskreg_clauses,
10425 OMP_CLAUSE__LOOPTEMP_);
10426 gcc_assert (innerc);
10427 int count = fd->collapse;
10428 if (fd->non_rect
10429 && fd->last_nonrect == fd->first_nonrect + 1)
10430 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10431 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10432 count += 4;
10433 for (i = 0; i < count; i++)
10435 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10436 OMP_CLAUSE__LOOPTEMP_);
10437 gcc_assert (innerc);
10439 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10440 OMP_CLAUSE__LOOPTEMP_);
10441 if (innerc)
10442 n2 = fold_convert (TREE_TYPE (n2),
10443 lookup_decl (OMP_CLAUSE_DECL (innerc),
10444 taskreg_ctx));
10447 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10449 clauses = gimple_omp_for_clauses (fd->for_stmt);
10450 stmts = NULL;
10451 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
10452 if (!gimple_seq_empty_p (stmts))
10454 gimple_seq_add_seq (&stmts, *dlist);
10455 *dlist = stmts;
10457 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10458 vinit = fd->loop.n1;
10459 if (cond_code == EQ_EXPR
10460 && tree_fits_shwi_p (fd->loop.n2)
10461 && ! integer_zerop (fd->loop.n2))
10462 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10463 else
10464 vinit = unshare_expr (vinit);
10466 /* Initialize the iterator variable, so that threads that don't execute
10467 any iterations don't execute the lastprivate clauses by accident. */
10468 gimplify_assign (fd->loop.v, vinit, body_p);
10472 /* OpenACC privatization.
10474 Or, in other words, *sharing* at the respective OpenACC level of
10475 parallelism.
10477 From a correctness perspective, a non-addressable variable can't be accessed
10478 outside the current thread, so it can go in a (faster than shared memory)
10479 register -- though that register may need to be broadcast in some
10480 circumstances. A variable can only meaningfully be "shared" across workers
10481 or vector lanes if its address is taken, e.g. by a call to an atomic
10482 builtin.
10484 From an optimisation perspective, the answer might be fuzzier: maybe
10485 sometimes, using shared memory directly would be faster than
10486 broadcasting. */
10488 static void
10489 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10490 const location_t loc, const tree c,
10491 const tree decl)
10493 const dump_user_location_t d_u_loc
10494 = dump_user_location_t::from_location_t (loc);
10495 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10496 #if __GNUC__ >= 10
10497 # pragma GCC diagnostic push
10498 # pragma GCC diagnostic ignored "-Wformat"
10499 #endif
10500 dump_printf_loc (l_dump_flags, d_u_loc,
10501 "variable %<%T%> ", decl);
10502 #if __GNUC__ >= 10
10503 # pragma GCC diagnostic pop
10504 #endif
10505 if (c)
10506 dump_printf (l_dump_flags,
10507 "in %qs clause ",
10508 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10509 else
10510 dump_printf (l_dump_flags,
10511 "declared in block ");
10514 static bool
10515 oacc_privatization_candidate_p (const location_t loc, const tree c,
10516 const tree decl)
10518 dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10520 /* There is some differentiation depending on block vs. clause. */
10521 bool block = !c;
10523 bool res = true;
10525 if (res && !VAR_P (decl))
10527 res = false;
10529 if (dump_enabled_p ())
10531 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10532 dump_printf (l_dump_flags,
10533 "potentially has improper OpenACC privatization level: %qs\n",
10534 get_tree_code_name (TREE_CODE (decl)));
10538 if (res && block && TREE_STATIC (decl))
10540 res = false;
10542 if (dump_enabled_p ())
10544 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10545 dump_printf (l_dump_flags,
10546 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10547 "static");
10551 if (res && block && DECL_EXTERNAL (decl))
10553 res = false;
10555 if (dump_enabled_p ())
10557 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10558 dump_printf (l_dump_flags,
10559 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10560 "external");
10564 if (res && !TREE_ADDRESSABLE (decl))
10566 res = false;
10568 if (dump_enabled_p ())
10570 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10571 dump_printf (l_dump_flags,
10572 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10573 "not addressable");
10577 if (res)
10579 if (dump_enabled_p ())
10581 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10582 dump_printf (l_dump_flags,
10583 "is candidate for adjusting OpenACC privatization level\n");
10587 if (dump_file && (dump_flags & TDF_DETAILS))
10589 print_generic_decl (dump_file, decl, dump_flags);
10590 fprintf (dump_file, "\n");
10593 return res;
10596 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10597 CTX. */
10599 static void
10600 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10602 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10603 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10605 tree decl = OMP_CLAUSE_DECL (c);
10607 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c, decl))
10608 continue;
10610 gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10611 ctx->oacc_privatization_candidates.safe_push (decl);
10615 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10616 CTX. */
10618 static void
10619 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10621 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10623 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL, decl))
10624 continue;
10626 gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10627 ctx->oacc_privatization_candidates.safe_push (decl);
10631 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10633 static tree
10634 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10635 struct walk_stmt_info *wi)
10637 gimple *stmt = gsi_stmt (*gsi_p);
10639 *handled_ops_p = true;
10640 switch (gimple_code (stmt))
10642 WALK_SUBSTMTS;
10644 case GIMPLE_OMP_FOR:
10645 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10646 && gimple_omp_for_combined_into_p (stmt))
10647 *handled_ops_p = false;
10648 break;
10650 case GIMPLE_OMP_SCAN:
10651 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10652 return integer_zero_node;
10653 default:
10654 break;
10656 return NULL;
10659 /* Helper function for lower_omp_for, add transformations for a worksharing
10660 loop with scan directives inside of it.
10661 For worksharing loop not combined with simd, transform:
10662 #pragma omp for reduction(inscan,+:r) private(i)
10663 for (i = 0; i < n; i = i + 1)
10666 update (r);
10668 #pragma omp scan inclusive(r)
10670 use (r);
10674 into two worksharing loops + code to merge results:
10676 num_threads = omp_get_num_threads ();
10677 thread_num = omp_get_thread_num ();
10678 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10679 <D.2099>:
10680 var2 = r;
10681 goto <D.2101>;
10682 <D.2100>:
10683 // For UDRs this is UDR init, or if ctors are needed, copy from
10684 // var3 that has been constructed to contain the neutral element.
10685 var2 = 0;
10686 <D.2101>:
10687 ivar = 0;
10688 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10689 // a shared array with num_threads elements and rprivb to a local array
10690 // number of elements equal to the number of (contiguous) iterations the
10691 // current thread will perform. controlb and controlp variables are
10692 // temporaries to handle deallocation of rprivb at the end of second
10693 // GOMP_FOR.
10694 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10695 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10696 for (i = 0; i < n; i = i + 1)
10699 // For UDRs this is UDR init or copy from var3.
10700 r = 0;
10701 // This is the input phase from user code.
10702 update (r);
10705 // For UDRs this is UDR merge.
10706 var2 = var2 + r;
10707 // Rather than handing it over to the user, save to local thread's
10708 // array.
10709 rprivb[ivar] = var2;
10710 // For exclusive scan, the above two statements are swapped.
10711 ivar = ivar + 1;
10714 // And remember the final value from this thread's into the shared
10715 // rpriva array.
10716 rpriva[(sizetype) thread_num] = var2;
10717 // If more than one thread, compute using Work-Efficient prefix sum
10718 // the inclusive parallel scan of the rpriva array.
10719 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10720 <D.2102>:
10721 GOMP_barrier ();
10722 down = 0;
10723 k = 1;
10724 num_threadsu = (unsigned int) num_threads;
10725 thread_numup1 = (unsigned int) thread_num + 1;
10726 <D.2108>:
10727 twok = k << 1;
10728 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10729 <D.2110>:
10730 down = 4294967295;
10731 k = k >> 1;
10732 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10733 <D.2112>:
10734 k = k >> 1;
10735 <D.2111>:
10736 twok = k << 1;
10737 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10738 mul = REALPART_EXPR <cplx>;
10739 ovf = IMAGPART_EXPR <cplx>;
10740 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10741 <D.2116>:
10742 andv = k & down;
10743 andvm1 = andv + 4294967295;
10744 l = mul + andvm1;
10745 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10746 <D.2120>:
10747 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10748 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10749 rpriva[l] = rpriva[l - k] + rpriva[l];
10750 <D.2117>:
10751 if (down == 0) goto <D.2121>; else goto <D.2122>;
10752 <D.2121>:
10753 k = k << 1;
10754 goto <D.2123>;
10755 <D.2122>:
10756 k = k >> 1;
10757 <D.2123>:
10758 GOMP_barrier ();
10759 if (k != 0) goto <D.2108>; else goto <D.2103>;
10760 <D.2103>:
10761 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10762 <D.2124>:
10763 // For UDRs this is UDR init or copy from var3.
10764 var2 = 0;
10765 goto <D.2126>;
10766 <D.2125>:
10767 var2 = rpriva[thread_num - 1];
10768 <D.2126>:
10769 ivar = 0;
10770 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10771 reduction(inscan,+:r) private(i)
10772 for (i = 0; i < n; i = i + 1)
10775 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10776 r = var2 + rprivb[ivar];
10779 // This is the scan phase from user code.
10780 use (r);
10781 // Plus a bump of the iterator.
10782 ivar = ivar + 1;
10784 } */
10786 static void
10787 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10788 struct omp_for_data *fd, omp_context *ctx)
10790 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10791 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10793 gimple_seq body = gimple_omp_body (stmt);
10794 gimple_stmt_iterator input1_gsi = gsi_none ();
10795 struct walk_stmt_info wi;
10796 memset (&wi, 0, sizeof (wi));
10797 wi.val_only = true;
10798 wi.info = (void *) &input1_gsi;
10799 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10800 gcc_assert (!gsi_end_p (input1_gsi));
10802 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10803 gimple_stmt_iterator gsi = input1_gsi;
10804 gsi_next (&gsi);
10805 gimple_stmt_iterator scan1_gsi = gsi;
10806 gimple *scan_stmt1 = gsi_stmt (gsi);
10807 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10809 gimple_seq input_body = gimple_omp_body (input_stmt1);
10810 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10811 gimple_omp_set_body (input_stmt1, NULL);
10812 gimple_omp_set_body (scan_stmt1, NULL);
10813 gimple_omp_set_body (stmt, NULL);
10815 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10816 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10817 gimple_omp_set_body (stmt, body);
10818 gimple_omp_set_body (input_stmt1, input_body);
10820 gimple_stmt_iterator input2_gsi = gsi_none ();
10821 memset (&wi, 0, sizeof (wi));
10822 wi.val_only = true;
10823 wi.info = (void *) &input2_gsi;
10824 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10825 gcc_assert (!gsi_end_p (input2_gsi));
10827 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10828 gsi = input2_gsi;
10829 gsi_next (&gsi);
10830 gimple_stmt_iterator scan2_gsi = gsi;
10831 gimple *scan_stmt2 = gsi_stmt (gsi);
10832 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10833 gimple_omp_set_body (scan_stmt2, scan_body);
10835 gimple_stmt_iterator input3_gsi = gsi_none ();
10836 gimple_stmt_iterator scan3_gsi = gsi_none ();
10837 gimple_stmt_iterator input4_gsi = gsi_none ();
10838 gimple_stmt_iterator scan4_gsi = gsi_none ();
10839 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10840 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10841 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10842 if (is_for_simd)
10844 memset (&wi, 0, sizeof (wi));
10845 wi.val_only = true;
10846 wi.info = (void *) &input3_gsi;
10847 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10848 gcc_assert (!gsi_end_p (input3_gsi));
10850 input_stmt3 = gsi_stmt (input3_gsi);
10851 gsi = input3_gsi;
10852 gsi_next (&gsi);
10853 scan3_gsi = gsi;
10854 scan_stmt3 = gsi_stmt (gsi);
10855 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10857 memset (&wi, 0, sizeof (wi));
10858 wi.val_only = true;
10859 wi.info = (void *) &input4_gsi;
10860 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10861 gcc_assert (!gsi_end_p (input4_gsi));
10863 input_stmt4 = gsi_stmt (input4_gsi);
10864 gsi = input4_gsi;
10865 gsi_next (&gsi);
10866 scan4_gsi = gsi;
10867 scan_stmt4 = gsi_stmt (gsi);
10868 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10870 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10871 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
10874 tree num_threads = create_tmp_var (integer_type_node);
10875 tree thread_num = create_tmp_var (integer_type_node);
10876 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10877 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10878 gimple *g = gimple_build_call (nthreads_decl, 0);
10879 gimple_call_set_lhs (g, num_threads);
10880 gimple_seq_add_stmt (body_p, g);
10881 g = gimple_build_call (threadnum_decl, 0);
10882 gimple_call_set_lhs (g, thread_num);
10883 gimple_seq_add_stmt (body_p, g);
10885 tree ivar = create_tmp_var (sizetype);
10886 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
10887 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
10888 tree k = create_tmp_var (unsigned_type_node);
10889 tree l = create_tmp_var (unsigned_type_node);
10891 gimple_seq clist = NULL, mdlist = NULL;
10892 gimple_seq thr01_list = NULL, thrn1_list = NULL;
10893 gimple_seq thr02_list = NULL, thrn2_list = NULL;
10894 gimple_seq scan1_list = NULL, input2_list = NULL;
10895 gimple_seq last_list = NULL, reduc_list = NULL;
10896 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10897 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10898 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10900 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10901 tree var = OMP_CLAUSE_DECL (c);
10902 tree new_var = lookup_decl (var, ctx);
10903 tree var3 = NULL_TREE;
10904 tree new_vard = new_var;
10905 if (omp_privatize_by_reference (var))
10906 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10907 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10909 var3 = maybe_lookup_decl (new_vard, ctx);
10910 if (var3 == new_vard)
10911 var3 = NULL_TREE;
10914 tree ptype = build_pointer_type (TREE_TYPE (new_var));
10915 tree rpriva = create_tmp_var (ptype);
10916 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10917 OMP_CLAUSE_DECL (nc) = rpriva;
10918 *cp1 = nc;
10919 cp1 = &OMP_CLAUSE_CHAIN (nc);
10921 tree rprivb = create_tmp_var (ptype);
10922 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10923 OMP_CLAUSE_DECL (nc) = rprivb;
10924 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
10925 *cp1 = nc;
10926 cp1 = &OMP_CLAUSE_CHAIN (nc);
10928 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
10929 if (new_vard != new_var)
10930 TREE_ADDRESSABLE (var2) = 1;
10931 gimple_add_tmp_var (var2);
10933 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
10934 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10935 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10936 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10937 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
10939 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
10940 thread_num, integer_minus_one_node);
10941 x = fold_convert_loc (clause_loc, sizetype, x);
10942 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10943 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10944 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10945 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
10947 x = fold_convert_loc (clause_loc, sizetype, l);
10948 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10949 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10950 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10951 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
10953 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
10954 x = fold_convert_loc (clause_loc, sizetype, x);
10955 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10956 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10957 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10958 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
10960 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
10961 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10962 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
10963 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
10965 tree var4 = is_for_simd ? new_var : var2;
10966 tree var5 = NULL_TREE, var6 = NULL_TREE;
10967 if (is_for_simd)
10969 var5 = lookup_decl (var, input_simd_ctx);
10970 var6 = lookup_decl (var, scan_simd_ctx);
10971 if (new_vard != new_var)
10973 var5 = build_simple_mem_ref_loc (clause_loc, var5);
10974 var6 = build_simple_mem_ref_loc (clause_loc, var6);
10977 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10979 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10980 tree val = var2;
10982 x = lang_hooks.decls.omp_clause_default_ctor
10983 (c, var2, build_outer_var_ref (var, ctx));
10984 if (x)
10985 gimplify_and_add (x, &clist);
10987 x = build_outer_var_ref (var, ctx);
10988 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
10990 gimplify_and_add (x, &thr01_list);
10992 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
10993 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10994 if (var3)
10996 x = unshare_expr (var4);
10997 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10998 gimplify_and_add (x, &thrn1_list);
10999 x = unshare_expr (var4);
11000 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11001 gimplify_and_add (x, &thr02_list);
11003 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
11005 /* Otherwise, assign to it the identity element. */
11006 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11007 tseq = copy_gimple_seq_and_replace_locals (tseq);
11008 if (!is_for_simd)
11010 if (new_vard != new_var)
11011 val = build_fold_addr_expr_loc (clause_loc, val);
11012 SET_DECL_VALUE_EXPR (new_vard, val);
11013 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11015 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
11016 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11017 lower_omp (&tseq, ctx);
11018 gimple_seq_add_seq (&thrn1_list, tseq);
11019 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11020 lower_omp (&tseq, ctx);
11021 gimple_seq_add_seq (&thr02_list, tseq);
11022 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11023 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11024 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11025 if (y)
11026 SET_DECL_VALUE_EXPR (new_vard, y);
11027 else
11029 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11030 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11034 x = unshare_expr (var4);
11035 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
11036 gimplify_and_add (x, &thrn2_list);
11038 if (is_for_simd)
11040 x = unshare_expr (rprivb_ref);
11041 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
11042 gimplify_and_add (x, &scan1_list);
11044 else
11046 if (ctx->scan_exclusive)
11048 x = unshare_expr (rprivb_ref);
11049 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11050 gimplify_and_add (x, &scan1_list);
11053 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11054 tseq = copy_gimple_seq_and_replace_locals (tseq);
11055 SET_DECL_VALUE_EXPR (placeholder, var2);
11056 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11057 lower_omp (&tseq, ctx);
11058 gimple_seq_add_seq (&scan1_list, tseq);
11060 if (ctx->scan_inclusive)
11062 x = unshare_expr (rprivb_ref);
11063 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11064 gimplify_and_add (x, &scan1_list);
11068 x = unshare_expr (rpriva_ref);
11069 x = lang_hooks.decls.omp_clause_assign_op (c, x,
11070 unshare_expr (var4));
11071 gimplify_and_add (x, &mdlist);
11073 x = unshare_expr (is_for_simd ? var6 : new_var);
11074 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
11075 gimplify_and_add (x, &input2_list);
11077 val = rprivb_ref;
11078 if (new_vard != new_var)
11079 val = build_fold_addr_expr_loc (clause_loc, val);
11081 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11082 tseq = copy_gimple_seq_and_replace_locals (tseq);
11083 SET_DECL_VALUE_EXPR (new_vard, val);
11084 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11085 if (is_for_simd)
11087 SET_DECL_VALUE_EXPR (placeholder, var6);
11088 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11090 else
11091 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11092 lower_omp (&tseq, ctx);
11093 if (y)
11094 SET_DECL_VALUE_EXPR (new_vard, y);
11095 else
11097 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11098 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11100 if (!is_for_simd)
11102 SET_DECL_VALUE_EXPR (placeholder, new_var);
11103 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11104 lower_omp (&tseq, ctx);
11106 gimple_seq_add_seq (&input2_list, tseq);
11108 x = build_outer_var_ref (var, ctx);
11109 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
11110 gimplify_and_add (x, &last_list);
11112 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
11113 gimplify_and_add (x, &reduc_list);
11114 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11115 tseq = copy_gimple_seq_and_replace_locals (tseq);
11116 val = rprival_ref;
11117 if (new_vard != new_var)
11118 val = build_fold_addr_expr_loc (clause_loc, val);
11119 SET_DECL_VALUE_EXPR (new_vard, val);
11120 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11121 SET_DECL_VALUE_EXPR (placeholder, var2);
11122 lower_omp (&tseq, ctx);
11123 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11124 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11125 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11126 if (y)
11127 SET_DECL_VALUE_EXPR (new_vard, y);
11128 else
11130 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11131 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11133 gimple_seq_add_seq (&reduc_list, tseq);
11134 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
11135 gimplify_and_add (x, &reduc_list);
11137 x = lang_hooks.decls.omp_clause_dtor (c, var2);
11138 if (x)
11139 gimplify_and_add (x, dlist);
11141 else
11143 x = build_outer_var_ref (var, ctx);
11144 gimplify_assign (unshare_expr (var4), x, &thr01_list);
11146 x = omp_reduction_init (c, TREE_TYPE (new_var));
11147 gimplify_assign (unshare_expr (var4), unshare_expr (x),
11148 &thrn1_list);
11149 gimplify_assign (unshare_expr (var4), x, &thr02_list);
11151 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
11153 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
11154 if (code == MINUS_EXPR)
11155 code = PLUS_EXPR;
11157 if (is_for_simd)
11158 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
11159 else
11161 if (ctx->scan_exclusive)
11162 gimplify_assign (unshare_expr (rprivb_ref), var2,
11163 &scan1_list);
11164 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
11165 gimplify_assign (var2, x, &scan1_list);
11166 if (ctx->scan_inclusive)
11167 gimplify_assign (unshare_expr (rprivb_ref), var2,
11168 &scan1_list);
11171 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
11172 &mdlist);
11174 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
11175 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
11177 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
11178 &last_list);
11180 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
11181 unshare_expr (rprival_ref));
11182 gimplify_assign (rprival_ref, x, &reduc_list);
11186 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11187 gimple_seq_add_stmt (&scan1_list, g);
11188 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11189 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11190 ? scan_stmt4 : scan_stmt2), g);
11192 tree controlb = create_tmp_var (boolean_type_node);
11193 tree controlp = create_tmp_var (ptr_type_node);
11194 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11195 OMP_CLAUSE_DECL (nc) = controlb;
11196 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11197 *cp1 = nc;
11198 cp1 = &OMP_CLAUSE_CHAIN (nc);
11199 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11200 OMP_CLAUSE_DECL (nc) = controlp;
11201 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11202 *cp1 = nc;
11203 cp1 = &OMP_CLAUSE_CHAIN (nc);
11204 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11205 OMP_CLAUSE_DECL (nc) = controlb;
11206 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11207 *cp2 = nc;
11208 cp2 = &OMP_CLAUSE_CHAIN (nc);
11209 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11210 OMP_CLAUSE_DECL (nc) = controlp;
11211 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11212 *cp2 = nc;
11213 cp2 = &OMP_CLAUSE_CHAIN (nc);
11215 *cp1 = gimple_omp_for_clauses (stmt);
11216 gimple_omp_for_set_clauses (stmt, new_clauses1);
11217 *cp2 = gimple_omp_for_clauses (new_stmt);
11218 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
11220 if (is_for_simd)
11222 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
11223 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
11225 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
11226 GSI_SAME_STMT);
11227 gsi_remove (&input3_gsi, true);
11228 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11229 GSI_SAME_STMT);
11230 gsi_remove (&scan3_gsi, true);
11231 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11232 GSI_SAME_STMT);
11233 gsi_remove (&input4_gsi, true);
11234 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11235 GSI_SAME_STMT);
11236 gsi_remove (&scan4_gsi, true);
11238 else
11240 gimple_omp_set_body (scan_stmt1, scan1_list);
11241 gimple_omp_set_body (input_stmt2, input2_list);
11244 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11245 GSI_SAME_STMT);
11246 gsi_remove (&input1_gsi, true);
11247 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11248 GSI_SAME_STMT);
11249 gsi_remove (&scan1_gsi, true);
11250 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11251 GSI_SAME_STMT);
11252 gsi_remove (&input2_gsi, true);
11253 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11254 GSI_SAME_STMT);
11255 gsi_remove (&scan2_gsi, true);
11257 gimple_seq_add_seq (body_p, clist);
11259 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11260 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11261 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11262 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11263 gimple_seq_add_stmt (body_p, g);
11264 g = gimple_build_label (lab1);
11265 gimple_seq_add_stmt (body_p, g);
11266 gimple_seq_add_seq (body_p, thr01_list);
11267 g = gimple_build_goto (lab3);
11268 gimple_seq_add_stmt (body_p, g);
11269 g = gimple_build_label (lab2);
11270 gimple_seq_add_stmt (body_p, g);
11271 gimple_seq_add_seq (body_p, thrn1_list);
11272 g = gimple_build_label (lab3);
11273 gimple_seq_add_stmt (body_p, g);
11275 g = gimple_build_assign (ivar, size_zero_node);
11276 gimple_seq_add_stmt (body_p, g);
11278 gimple_seq_add_stmt (body_p, stmt);
11279 gimple_seq_add_seq (body_p, body);
11280 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11281 fd->loop.v));
11283 g = gimple_build_omp_return (true);
11284 gimple_seq_add_stmt (body_p, g);
11285 gimple_seq_add_seq (body_p, mdlist);
11287 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11288 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11289 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11290 gimple_seq_add_stmt (body_p, g);
11291 g = gimple_build_label (lab1);
11292 gimple_seq_add_stmt (body_p, g);
11294 g = omp_build_barrier (NULL);
11295 gimple_seq_add_stmt (body_p, g);
11297 tree down = create_tmp_var (unsigned_type_node);
11298 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11299 gimple_seq_add_stmt (body_p, g);
11301 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11302 gimple_seq_add_stmt (body_p, g);
11304 tree num_threadsu = create_tmp_var (unsigned_type_node);
11305 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11306 gimple_seq_add_stmt (body_p, g);
11308 tree thread_numu = create_tmp_var (unsigned_type_node);
11309 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11310 gimple_seq_add_stmt (body_p, g);
11312 tree thread_nump1 = create_tmp_var (unsigned_type_node);
11313 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11314 build_int_cst (unsigned_type_node, 1));
11315 gimple_seq_add_stmt (body_p, g);
11317 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11318 g = gimple_build_label (lab3);
11319 gimple_seq_add_stmt (body_p, g);
11321 tree twok = create_tmp_var (unsigned_type_node);
11322 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11323 gimple_seq_add_stmt (body_p, g);
11325 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11326 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11327 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11328 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11329 gimple_seq_add_stmt (body_p, g);
11330 g = gimple_build_label (lab4);
11331 gimple_seq_add_stmt (body_p, g);
11332 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11333 gimple_seq_add_stmt (body_p, g);
11334 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11335 gimple_seq_add_stmt (body_p, g);
11337 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11338 gimple_seq_add_stmt (body_p, g);
11339 g = gimple_build_label (lab6);
11340 gimple_seq_add_stmt (body_p, g);
11342 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11343 gimple_seq_add_stmt (body_p, g);
11345 g = gimple_build_label (lab5);
11346 gimple_seq_add_stmt (body_p, g);
11348 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11349 gimple_seq_add_stmt (body_p, g);
11351 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11352 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11353 gimple_call_set_lhs (g, cplx);
11354 gimple_seq_add_stmt (body_p, g);
11355 tree mul = create_tmp_var (unsigned_type_node);
11356 g = gimple_build_assign (mul, REALPART_EXPR,
11357 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11358 gimple_seq_add_stmt (body_p, g);
11359 tree ovf = create_tmp_var (unsigned_type_node);
11360 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11361 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11362 gimple_seq_add_stmt (body_p, g);
11364 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11365 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11366 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11367 lab7, lab8);
11368 gimple_seq_add_stmt (body_p, g);
11369 g = gimple_build_label (lab7);
11370 gimple_seq_add_stmt (body_p, g);
11372 tree andv = create_tmp_var (unsigned_type_node);
11373 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11374 gimple_seq_add_stmt (body_p, g);
11375 tree andvm1 = create_tmp_var (unsigned_type_node);
11376 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11377 build_minus_one_cst (unsigned_type_node));
11378 gimple_seq_add_stmt (body_p, g);
11380 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11381 gimple_seq_add_stmt (body_p, g);
11383 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11384 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11385 gimple_seq_add_stmt (body_p, g);
11386 g = gimple_build_label (lab9);
11387 gimple_seq_add_stmt (body_p, g);
11388 gimple_seq_add_seq (body_p, reduc_list);
11389 g = gimple_build_label (lab8);
11390 gimple_seq_add_stmt (body_p, g);
11392 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11393 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11394 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11395 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11396 lab10, lab11);
11397 gimple_seq_add_stmt (body_p, g);
11398 g = gimple_build_label (lab10);
11399 gimple_seq_add_stmt (body_p, g);
11400 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11401 gimple_seq_add_stmt (body_p, g);
11402 g = gimple_build_goto (lab12);
11403 gimple_seq_add_stmt (body_p, g);
11404 g = gimple_build_label (lab11);
11405 gimple_seq_add_stmt (body_p, g);
11406 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11407 gimple_seq_add_stmt (body_p, g);
11408 g = gimple_build_label (lab12);
11409 gimple_seq_add_stmt (body_p, g);
11411 g = omp_build_barrier (NULL);
11412 gimple_seq_add_stmt (body_p, g);
11414 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11415 lab3, lab2);
11416 gimple_seq_add_stmt (body_p, g);
11418 g = gimple_build_label (lab2);
11419 gimple_seq_add_stmt (body_p, g);
11421 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11422 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11423 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11424 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11425 gimple_seq_add_stmt (body_p, g);
11426 g = gimple_build_label (lab1);
11427 gimple_seq_add_stmt (body_p, g);
11428 gimple_seq_add_seq (body_p, thr02_list);
11429 g = gimple_build_goto (lab3);
11430 gimple_seq_add_stmt (body_p, g);
11431 g = gimple_build_label (lab2);
11432 gimple_seq_add_stmt (body_p, g);
11433 gimple_seq_add_seq (body_p, thrn2_list);
11434 g = gimple_build_label (lab3);
11435 gimple_seq_add_stmt (body_p, g);
11437 g = gimple_build_assign (ivar, size_zero_node);
11438 gimple_seq_add_stmt (body_p, g);
11439 gimple_seq_add_stmt (body_p, new_stmt);
11440 gimple_seq_add_seq (body_p, new_body);
11442 gimple_seq new_dlist = NULL;
11443 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11444 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11445 tree num_threadsm1 = create_tmp_var (integer_type_node);
11446 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11447 integer_minus_one_node);
11448 gimple_seq_add_stmt (&new_dlist, g);
11449 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11450 gimple_seq_add_stmt (&new_dlist, g);
11451 g = gimple_build_label (lab1);
11452 gimple_seq_add_stmt (&new_dlist, g);
11453 gimple_seq_add_seq (&new_dlist, last_list);
11454 g = gimple_build_label (lab2);
11455 gimple_seq_add_stmt (&new_dlist, g);
11456 gimple_seq_add_seq (&new_dlist, *dlist);
11457 *dlist = new_dlist;
11460 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11461 the addresses of variables to be made private at the surrounding
11462 parallelism level. Such functions appear in the gimple code stream in two
11463 forms, e.g. for a partitioned loop:
11465 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11466 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11467 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11468 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11470 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11471 not as part of a HEAD_MARK sequence:
11473 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11475 For such stand-alone appearances, the 3rd argument is always 0, denoting
11476 gang partitioning. */
11478 static gcall *
11479 lower_oacc_private_marker (omp_context *ctx)
11481 if (ctx->oacc_privatization_candidates.length () == 0)
11482 return NULL;
11484 auto_vec<tree, 5> args;
11486 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11487 args.quick_push (integer_zero_node);
11488 args.quick_push (integer_minus_one_node);
11490 int i;
11491 tree decl;
11492 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11494 for (omp_context *thisctx = ctx; thisctx; thisctx = thisctx->outer)
11496 tree inner_decl = maybe_lookup_decl (decl, thisctx);
11497 if (inner_decl)
11499 decl = inner_decl;
11500 break;
11503 gcc_checking_assert (decl);
11505 tree addr = build_fold_addr_expr (decl);
11506 args.safe_push (addr);
11509 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
11512 /* Lower code for an OMP loop directive. */
11514 static void
11515 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11517 tree *rhs_p, block;
11518 struct omp_for_data fd, *fdp = NULL;
11519 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
11520 gbind *new_stmt;
11521 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
11522 gimple_seq cnt_list = NULL, clist = NULL;
11523 gimple_seq oacc_head = NULL, oacc_tail = NULL;
11524 size_t i;
11526 push_gimplify_context ();
11528 if (is_gimple_omp_oacc (ctx->stmt))
11529 oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));
11531 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
11533 block = make_node (BLOCK);
11534 new_stmt = gimple_build_bind (NULL, NULL, block);
11535 /* Replace at gsi right away, so that 'stmt' is no member
11536 of a sequence anymore as we're going to add to a different
11537 one below. */
11538 gsi_replace (gsi_p, new_stmt, true);
11540 /* Move declaration of temporaries in the loop body before we make
11541 it go away. */
11542 omp_for_body = gimple_omp_body (stmt);
11543 if (!gimple_seq_empty_p (omp_for_body)
11544 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
11546 gbind *inner_bind
11547 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
11548 tree vars = gimple_bind_vars (inner_bind);
11549 if (is_gimple_omp_oacc (ctx->stmt))
11550 oacc_privatization_scan_decl_chain (ctx, vars);
11551 gimple_bind_append_vars (new_stmt, vars);
11552 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11553 keep them on the inner_bind and it's block. */
11554 gimple_bind_set_vars (inner_bind, NULL_TREE);
11555 if (gimple_bind_block (inner_bind))
11556 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
11559 if (gimple_omp_for_combined_into_p (stmt))
11561 omp_extract_for_data (stmt, &fd, NULL);
11562 fdp = &fd;
11564 /* We need two temporaries with fd.loop.v type (istart/iend)
11565 and then (fd.collapse - 1) temporaries with the same
11566 type for count2 ... countN-1 vars if not constant. */
11567 size_t count = 2;
11568 tree type = fd.iter_type;
11569 if (fd.collapse > 1
11570 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11571 count += fd.collapse - 1;
11572 size_t count2 = 0;
11573 tree type2 = NULL_TREE;
11574 bool taskreg_for
11575 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
11576 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
11577 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
11578 tree simtc = NULL;
11579 tree clauses = *pc;
11580 if (fd.collapse > 1
11581 && fd.non_rect
11582 && fd.last_nonrect == fd.first_nonrect + 1
11583 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11584 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
11585 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
11587 v = gimple_omp_for_index (stmt, fd.first_nonrect);
11588 type2 = TREE_TYPE (v);
11589 count++;
11590 count2 = 3;
11592 if (taskreg_for)
11593 outerc
11594 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
11595 OMP_CLAUSE__LOOPTEMP_);
11596 if (ctx->simt_stmt)
11597 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
11598 OMP_CLAUSE__LOOPTEMP_);
11599 for (i = 0; i < count + count2; i++)
11601 tree temp;
11602 if (taskreg_for)
11604 gcc_assert (outerc);
11605 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
11606 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
11607 OMP_CLAUSE__LOOPTEMP_);
11609 else
11611 /* If there are 2 adjacent SIMD stmts, one with _simt_
11612 clause, another without, make sure they have the same
11613 decls in _looptemp_ clauses, because the outer stmt
11614 they are combined into will look up just one inner_stmt. */
11615 if (ctx->simt_stmt)
11616 temp = OMP_CLAUSE_DECL (simtc);
11617 else
11618 temp = create_tmp_var (i >= count ? type2 : type);
11619 insert_decl_map (&ctx->outer->cb, temp, temp);
11621 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
11622 OMP_CLAUSE_DECL (*pc) = temp;
11623 pc = &OMP_CLAUSE_CHAIN (*pc);
11624 if (ctx->simt_stmt)
11625 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
11626 OMP_CLAUSE__LOOPTEMP_);
11628 *pc = clauses;
11631 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11632 dlist = NULL;
11633 body = NULL;
11634 tree rclauses
11635 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
11636 OMP_CLAUSE_REDUCTION);
11637 tree rtmp = NULL_TREE;
11638 if (rclauses)
11640 tree type = build_pointer_type (pointer_sized_int_node);
11641 tree temp = create_tmp_var (type);
11642 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
11643 OMP_CLAUSE_DECL (c) = temp;
11644 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
11645 gimple_omp_for_set_clauses (stmt, c);
11646 lower_omp_task_reductions (ctx, OMP_FOR,
11647 gimple_omp_for_clauses (stmt),
11648 &tred_ilist, &tred_dlist);
11649 rclauses = c;
11650 rtmp = make_ssa_name (type);
11651 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
11654 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
11655 ctx);
11657 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
11658 fdp);
11659 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
11660 gimple_omp_for_pre_body (stmt));
11662 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11664 gcall *private_marker = NULL;
11665 if (is_gimple_omp_oacc (ctx->stmt)
11666 && !gimple_seq_empty_p (omp_for_body))
11667 private_marker = lower_oacc_private_marker (ctx);
11669 /* Lower the header expressions. At this point, we can assume that
11670 the header is of the form:
11672 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11674 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11675 using the .omp_data_s mapping, if needed. */
11676 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
11678 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
11679 if (TREE_CODE (*rhs_p) == TREE_VEC)
11681 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11682 TREE_VEC_ELT (*rhs_p, 1)
11683 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11684 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11685 TREE_VEC_ELT (*rhs_p, 2)
11686 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11688 else if (!is_gimple_min_invariant (*rhs_p))
11689 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11690 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11691 recompute_tree_invariant_for_addr_expr (*rhs_p);
11693 rhs_p = gimple_omp_for_final_ptr (stmt, i);
11694 if (TREE_CODE (*rhs_p) == TREE_VEC)
11696 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11697 TREE_VEC_ELT (*rhs_p, 1)
11698 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11699 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11700 TREE_VEC_ELT (*rhs_p, 2)
11701 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11703 else if (!is_gimple_min_invariant (*rhs_p))
11704 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11705 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11706 recompute_tree_invariant_for_addr_expr (*rhs_p);
11708 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
11709 if (!is_gimple_min_invariant (*rhs_p))
11710 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11712 if (rclauses)
11713 gimple_seq_add_seq (&tred_ilist, cnt_list);
11714 else
11715 gimple_seq_add_seq (&body, cnt_list);
11717 /* Once lowered, extract the bounds and clauses. */
11718 omp_extract_for_data (stmt, &fd, NULL);
11720 if (is_gimple_omp_oacc (ctx->stmt)
11721 && !ctx_in_oacc_kernels_region (ctx))
11722 lower_oacc_head_tail (gimple_location (stmt),
11723 gimple_omp_for_clauses (stmt), private_marker,
11724 &oacc_head, &oacc_tail, ctx);
11726 /* Add OpenACC partitioning and reduction markers just before the loop. */
11727 if (oacc_head)
11728 gimple_seq_add_seq (&body, oacc_head);
11730 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
11732 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11733 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11734 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11735 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11737 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
11738 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
11739 OMP_CLAUSE_LINEAR_STEP (c)
11740 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
11741 ctx);
11744 if ((ctx->scan_inclusive || ctx->scan_exclusive)
11745 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11746 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
11747 else
11749 gimple_seq_add_stmt (&body, stmt);
11750 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
11753 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
11754 fd.loop.v));
11756 /* After the loop, add exit clauses. */
11757 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
11759 if (clist)
11761 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
11762 gcall *g = gimple_build_call (fndecl, 0);
11763 gimple_seq_add_stmt (&body, g);
11764 gimple_seq_add_seq (&body, clist);
11765 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
11766 g = gimple_build_call (fndecl, 0);
11767 gimple_seq_add_stmt (&body, g);
11770 if (ctx->cancellable)
11771 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
11773 gimple_seq_add_seq (&body, dlist);
11775 if (rclauses)
11777 gimple_seq_add_seq (&tred_ilist, body);
11778 body = tred_ilist;
11781 body = maybe_catch_exception (body);
11783 /* Region exit marker goes at the end of the loop body. */
11784 gimple *g = gimple_build_omp_return (fd.have_nowait);
11785 gimple_seq_add_stmt (&body, g);
11787 gimple_seq_add_seq (&body, tred_dlist);
11789 maybe_add_implicit_barrier_cancel (ctx, g, &body);
11791 if (rclauses)
11792 OMP_CLAUSE_DECL (rclauses) = rtmp;
11794 /* Add OpenACC joining and reduction markers just after the loop. */
11795 if (oacc_tail)
11796 gimple_seq_add_seq (&body, oacc_tail);
11798 pop_gimplify_context (new_stmt);
11800 gimple_bind_append_vars (new_stmt, ctx->block_vars);
11801 maybe_remove_omp_member_access_dummy_vars (new_stmt);
11802 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
11803 if (BLOCK_VARS (block))
11804 TREE_USED (block) = 1;
11806 gimple_bind_set_body (new_stmt, body);
11807 gimple_omp_set_body (stmt, NULL);
11808 gimple_omp_for_set_pre_body (stmt, NULL);
11811 /* Callback for walk_stmts. Check if the current statement only contains
11812 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11814 static tree
11815 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11816 bool *handled_ops_p,
11817 struct walk_stmt_info *wi)
11819 int *info = (int *) wi->info;
11820 gimple *stmt = gsi_stmt (*gsi_p);
11822 *handled_ops_p = true;
11823 switch (gimple_code (stmt))
11825 WALK_SUBSTMTS;
11827 case GIMPLE_DEBUG:
11828 break;
11829 case GIMPLE_OMP_FOR:
11830 case GIMPLE_OMP_SECTIONS:
11831 *info = *info == 0 ? 1 : -1;
11832 break;
11833 default:
11834 *info = -1;
11835 break;
11837 return NULL;
11840 struct omp_taskcopy_context
11842 /* This field must be at the beginning, as we do "inheritance": Some
11843 callback functions for tree-inline.c (e.g., omp_copy_decl)
11844 receive a copy_body_data pointer that is up-casted to an
11845 omp_context pointer. */
11846 copy_body_data cb;
11847 omp_context *ctx;
11850 static tree
11851 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11853 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11855 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11856 return create_tmp_var (TREE_TYPE (var));
11858 return var;
11861 static tree
11862 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11864 tree name, new_fields = NULL, type, f;
11866 type = lang_hooks.types.make_type (RECORD_TYPE);
11867 name = DECL_NAME (TYPE_NAME (orig_type));
11868 name = build_decl (gimple_location (tcctx->ctx->stmt),
11869 TYPE_DECL, name, type);
11870 TYPE_NAME (type) = name;
11872 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11874 tree new_f = copy_node (f);
11875 DECL_CONTEXT (new_f) = type;
11876 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11877 TREE_CHAIN (new_f) = new_fields;
11878 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11879 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11880 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11881 &tcctx->cb, NULL);
11882 new_fields = new_f;
11883 tcctx->cb.decl_map->put (f, new_f);
11885 TYPE_FIELDS (type) = nreverse (new_fields);
11886 layout_type (type);
11887 return type;
11890 /* Create task copyfn. */
11892 static void
11893 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
11895 struct function *child_cfun;
11896 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
11897 tree record_type, srecord_type, bind, list;
11898 bool record_needs_remap = false, srecord_needs_remap = false;
11899 splay_tree_node n;
11900 struct omp_taskcopy_context tcctx;
11901 location_t loc = gimple_location (task_stmt);
11902 size_t looptempno = 0;
11904 child_fn = gimple_omp_task_copy_fn (task_stmt);
11905 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
11906 gcc_assert (child_cfun->cfg == NULL);
11907 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
11909 /* Reset DECL_CONTEXT on function arguments. */
11910 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
11911 DECL_CONTEXT (t) = child_fn;
11913 /* Populate the function. */
11914 push_gimplify_context ();
11915 push_cfun (child_cfun);
11917 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
11918 TREE_SIDE_EFFECTS (bind) = 1;
11919 list = NULL;
11920 DECL_SAVED_TREE (child_fn) = bind;
11921 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
11923 /* Remap src and dst argument types if needed. */
11924 record_type = ctx->record_type;
11925 srecord_type = ctx->srecord_type;
11926 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
11927 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11929 record_needs_remap = true;
11930 break;
11932 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
11933 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11935 srecord_needs_remap = true;
11936 break;
11939 if (record_needs_remap || srecord_needs_remap)
11941 memset (&tcctx, '\0', sizeof (tcctx));
11942 tcctx.cb.src_fn = ctx->cb.src_fn;
11943 tcctx.cb.dst_fn = child_fn;
11944 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
11945 gcc_checking_assert (tcctx.cb.src_node);
11946 tcctx.cb.dst_node = tcctx.cb.src_node;
11947 tcctx.cb.src_cfun = ctx->cb.src_cfun;
11948 tcctx.cb.copy_decl = task_copyfn_copy_decl;
11949 tcctx.cb.eh_lp_nr = 0;
11950 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
11951 tcctx.cb.decl_map = new hash_map<tree, tree>;
11952 tcctx.ctx = ctx;
11954 if (record_needs_remap)
11955 record_type = task_copyfn_remap_type (&tcctx, record_type);
11956 if (srecord_needs_remap)
11957 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
11959 else
11960 tcctx.cb.decl_map = NULL;
11962 arg = DECL_ARGUMENTS (child_fn);
11963 TREE_TYPE (arg) = build_pointer_type (record_type);
11964 sarg = DECL_CHAIN (arg);
11965 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
11967 /* First pass: initialize temporaries used in record_type and srecord_type
11968 sizes and field offsets. */
11969 if (tcctx.cb.decl_map)
11970 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11971 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11973 tree *p;
11975 decl = OMP_CLAUSE_DECL (c);
11976 p = tcctx.cb.decl_map->get (decl);
11977 if (p == NULL)
11978 continue;
11979 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11980 sf = (tree) n->value;
11981 sf = *tcctx.cb.decl_map->get (sf);
11982 src = build_simple_mem_ref_loc (loc, sarg);
11983 src = omp_build_component_ref (src, sf);
11984 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
11985 append_to_statement_list (t, &list);
11988 /* Second pass: copy shared var pointers and copy construct non-VLA
11989 firstprivate vars. */
11990 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11991 switch (OMP_CLAUSE_CODE (c))
11993 splay_tree_key key;
11994 case OMP_CLAUSE_SHARED:
11995 decl = OMP_CLAUSE_DECL (c);
11996 key = (splay_tree_key) decl;
11997 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
11998 key = (splay_tree_key) &DECL_UID (decl);
11999 n = splay_tree_lookup (ctx->field_map, key);
12000 if (n == NULL)
12001 break;
12002 f = (tree) n->value;
12003 if (tcctx.cb.decl_map)
12004 f = *tcctx.cb.decl_map->get (f);
12005 n = splay_tree_lookup (ctx->sfield_map, key);
12006 sf = (tree) n->value;
12007 if (tcctx.cb.decl_map)
12008 sf = *tcctx.cb.decl_map->get (sf);
12009 src = build_simple_mem_ref_loc (loc, sarg);
12010 src = omp_build_component_ref (src, sf);
12011 dst = build_simple_mem_ref_loc (loc, arg);
12012 dst = omp_build_component_ref (dst, f);
12013 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12014 append_to_statement_list (t, &list);
12015 break;
12016 case OMP_CLAUSE_REDUCTION:
12017 case OMP_CLAUSE_IN_REDUCTION:
12018 decl = OMP_CLAUSE_DECL (c);
12019 if (TREE_CODE (decl) == MEM_REF)
12021 decl = TREE_OPERAND (decl, 0);
12022 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12023 decl = TREE_OPERAND (decl, 0);
12024 if (TREE_CODE (decl) == INDIRECT_REF
12025 || TREE_CODE (decl) == ADDR_EXPR)
12026 decl = TREE_OPERAND (decl, 0);
12028 key = (splay_tree_key) decl;
12029 n = splay_tree_lookup (ctx->field_map, key);
12030 if (n == NULL)
12031 break;
12032 f = (tree) n->value;
12033 if (tcctx.cb.decl_map)
12034 f = *tcctx.cb.decl_map->get (f);
12035 n = splay_tree_lookup (ctx->sfield_map, key);
12036 sf = (tree) n->value;
12037 if (tcctx.cb.decl_map)
12038 sf = *tcctx.cb.decl_map->get (sf);
12039 src = build_simple_mem_ref_loc (loc, sarg);
12040 src = omp_build_component_ref (src, sf);
12041 if (decl != OMP_CLAUSE_DECL (c)
12042 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12043 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12044 src = build_simple_mem_ref_loc (loc, src);
12045 dst = build_simple_mem_ref_loc (loc, arg);
12046 dst = omp_build_component_ref (dst, f);
12047 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12048 append_to_statement_list (t, &list);
12049 break;
12050 case OMP_CLAUSE__LOOPTEMP_:
12051 /* Fields for first two _looptemp_ clauses are initialized by
12052 GOMP_taskloop*, the rest are handled like firstprivate. */
12053 if (looptempno < 2)
12055 looptempno++;
12056 break;
12058 /* FALLTHRU */
12059 case OMP_CLAUSE__REDUCTEMP_:
12060 case OMP_CLAUSE_FIRSTPRIVATE:
12061 decl = OMP_CLAUSE_DECL (c);
12062 if (is_variable_sized (decl))
12063 break;
12064 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12065 if (n == NULL)
12066 break;
12067 f = (tree) n->value;
12068 if (tcctx.cb.decl_map)
12069 f = *tcctx.cb.decl_map->get (f);
12070 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12071 if (n != NULL)
12073 sf = (tree) n->value;
12074 if (tcctx.cb.decl_map)
12075 sf = *tcctx.cb.decl_map->get (sf);
12076 src = build_simple_mem_ref_loc (loc, sarg);
12077 src = omp_build_component_ref (src, sf);
12078 if (use_pointer_for_field (decl, NULL)
12079 || omp_privatize_by_reference (decl))
12080 src = build_simple_mem_ref_loc (loc, src);
12082 else
12083 src = decl;
12084 dst = build_simple_mem_ref_loc (loc, arg);
12085 dst = omp_build_component_ref (dst, f);
12086 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
12087 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12088 else
12090 if (ctx->allocate_map)
12091 if (tree *allocatorp = ctx->allocate_map->get (decl))
12093 tree allocator = *allocatorp;
12094 HOST_WIDE_INT ialign = 0;
12095 if (TREE_CODE (allocator) == TREE_LIST)
12097 ialign = tree_to_uhwi (TREE_VALUE (allocator));
12098 allocator = TREE_PURPOSE (allocator);
12100 if (TREE_CODE (allocator) != INTEGER_CST)
12102 n = splay_tree_lookup (ctx->sfield_map,
12103 (splay_tree_key) allocator);
12104 allocator = (tree) n->value;
12105 if (tcctx.cb.decl_map)
12106 allocator = *tcctx.cb.decl_map->get (allocator);
12107 tree a = build_simple_mem_ref_loc (loc, sarg);
12108 allocator = omp_build_component_ref (a, allocator);
12110 allocator = fold_convert (pointer_sized_int_node, allocator);
12111 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
12112 tree align = build_int_cst (size_type_node,
12113 MAX (ialign,
12114 DECL_ALIGN_UNIT (decl)));
12115 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
12116 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
12117 allocator);
12118 ptr = fold_convert (TREE_TYPE (dst), ptr);
12119 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
12120 append_to_statement_list (t, &list);
12121 dst = build_simple_mem_ref_loc (loc, dst);
12123 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12125 append_to_statement_list (t, &list);
12126 break;
12127 case OMP_CLAUSE_PRIVATE:
12128 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
12129 break;
12130 decl = OMP_CLAUSE_DECL (c);
12131 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12132 f = (tree) n->value;
12133 if (tcctx.cb.decl_map)
12134 f = *tcctx.cb.decl_map->get (f);
12135 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12136 if (n != NULL)
12138 sf = (tree) n->value;
12139 if (tcctx.cb.decl_map)
12140 sf = *tcctx.cb.decl_map->get (sf);
12141 src = build_simple_mem_ref_loc (loc, sarg);
12142 src = omp_build_component_ref (src, sf);
12143 if (use_pointer_for_field (decl, NULL))
12144 src = build_simple_mem_ref_loc (loc, src);
12146 else
12147 src = decl;
12148 dst = build_simple_mem_ref_loc (loc, arg);
12149 dst = omp_build_component_ref (dst, f);
12150 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12151 append_to_statement_list (t, &list);
12152 break;
12153 default:
12154 break;
12157 /* Last pass: handle VLA firstprivates. */
12158 if (tcctx.cb.decl_map)
12159 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12160 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12162 tree ind, ptr, df;
12164 decl = OMP_CLAUSE_DECL (c);
12165 if (!is_variable_sized (decl))
12166 continue;
12167 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12168 if (n == NULL)
12169 continue;
12170 f = (tree) n->value;
12171 f = *tcctx.cb.decl_map->get (f);
12172 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
12173 ind = DECL_VALUE_EXPR (decl);
12174 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
12175 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
12176 n = splay_tree_lookup (ctx->sfield_map,
12177 (splay_tree_key) TREE_OPERAND (ind, 0));
12178 sf = (tree) n->value;
12179 sf = *tcctx.cb.decl_map->get (sf);
12180 src = build_simple_mem_ref_loc (loc, sarg);
12181 src = omp_build_component_ref (src, sf);
12182 src = build_simple_mem_ref_loc (loc, src);
12183 dst = build_simple_mem_ref_loc (loc, arg);
12184 dst = omp_build_component_ref (dst, f);
12185 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12186 append_to_statement_list (t, &list);
12187 n = splay_tree_lookup (ctx->field_map,
12188 (splay_tree_key) TREE_OPERAND (ind, 0));
12189 df = (tree) n->value;
12190 df = *tcctx.cb.decl_map->get (df);
12191 ptr = build_simple_mem_ref_loc (loc, arg);
12192 ptr = omp_build_component_ref (ptr, df);
12193 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
12194 build_fold_addr_expr_loc (loc, dst));
12195 append_to_statement_list (t, &list);
12198 t = build1 (RETURN_EXPR, void_type_node, NULL);
12199 append_to_statement_list (t, &list);
12201 if (tcctx.cb.decl_map)
12202 delete tcctx.cb.decl_map;
12203 pop_gimplify_context (NULL);
12204 BIND_EXPR_BODY (bind) = list;
12205 pop_cfun ();
12208 static void
12209 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
12211 tree c, clauses;
12212 gimple *g;
12213 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
12215 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
12216 gcc_assert (clauses);
12217 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12218 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
12219 switch (OMP_CLAUSE_DEPEND_KIND (c))
12221 case OMP_CLAUSE_DEPEND_LAST:
12222 /* Lowering already done at gimplification. */
12223 return;
12224 case OMP_CLAUSE_DEPEND_IN:
12225 cnt[2]++;
12226 break;
12227 case OMP_CLAUSE_DEPEND_OUT:
12228 case OMP_CLAUSE_DEPEND_INOUT:
12229 cnt[0]++;
12230 break;
12231 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12232 cnt[1]++;
12233 break;
12234 case OMP_CLAUSE_DEPEND_DEPOBJ:
12235 cnt[3]++;
12236 break;
12237 case OMP_CLAUSE_DEPEND_SOURCE:
12238 case OMP_CLAUSE_DEPEND_SINK:
12239 /* FALLTHRU */
12240 default:
12241 gcc_unreachable ();
12243 if (cnt[1] || cnt[3])
12244 idx = 5;
12245 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
12246 tree type = build_array_type_nelts (ptr_type_node, total + idx);
12247 tree array = create_tmp_var (type);
12248 TREE_ADDRESSABLE (array) = 1;
12249 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12250 NULL_TREE);
12251 if (idx == 5)
12253 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12254 gimple_seq_add_stmt (iseq, g);
12255 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12256 NULL_TREE);
12258 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12259 gimple_seq_add_stmt (iseq, g);
12260 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12262 r = build4 (ARRAY_REF, ptr_type_node, array,
12263 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12264 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12265 gimple_seq_add_stmt (iseq, g);
12267 for (i = 0; i < 4; i++)
12269 if (cnt[i] == 0)
12270 continue;
12271 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12272 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12273 continue;
12274 else
12276 switch (OMP_CLAUSE_DEPEND_KIND (c))
12278 case OMP_CLAUSE_DEPEND_IN:
12279 if (i != 2)
12280 continue;
12281 break;
12282 case OMP_CLAUSE_DEPEND_OUT:
12283 case OMP_CLAUSE_DEPEND_INOUT:
12284 if (i != 0)
12285 continue;
12286 break;
12287 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12288 if (i != 1)
12289 continue;
12290 break;
12291 case OMP_CLAUSE_DEPEND_DEPOBJ:
12292 if (i != 3)
12293 continue;
12294 break;
12295 default:
12296 gcc_unreachable ();
12298 tree t = OMP_CLAUSE_DECL (c);
12299 t = fold_convert (ptr_type_node, t);
12300 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12301 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12302 NULL_TREE, NULL_TREE);
12303 g = gimple_build_assign (r, t);
12304 gimple_seq_add_stmt (iseq, g);
12307 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12308 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12309 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12310 OMP_CLAUSE_CHAIN (c) = *pclauses;
12311 *pclauses = c;
12312 tree clobber = build_clobber (type);
12313 g = gimple_build_assign (array, clobber);
12314 gimple_seq_add_stmt (oseq, g);
12317 /* Lower the OpenMP parallel or task directive in the current statement
12318 in GSI_P. CTX holds context information for the directive. */
12320 static void
12321 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12323 tree clauses;
12324 tree child_fn, t;
12325 gimple *stmt = gsi_stmt (*gsi_p);
12326 gbind *par_bind, *bind, *dep_bind = NULL;
12327 gimple_seq par_body;
12328 location_t loc = gimple_location (stmt);
12330 clauses = gimple_omp_taskreg_clauses (stmt);
12331 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12332 && gimple_omp_task_taskwait_p (stmt))
12334 par_bind = NULL;
12335 par_body = NULL;
12337 else
12339 par_bind
12340 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
12341 par_body = gimple_bind_body (par_bind);
12343 child_fn = ctx->cb.dst_fn;
12344 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
12345 && !gimple_omp_parallel_combined_p (stmt))
12347 struct walk_stmt_info wi;
12348 int ws_num = 0;
12350 memset (&wi, 0, sizeof (wi));
12351 wi.info = &ws_num;
12352 wi.val_only = true;
12353 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12354 if (ws_num == 1)
12355 gimple_omp_parallel_set_combined_p (stmt, true);
12357 gimple_seq dep_ilist = NULL;
12358 gimple_seq dep_olist = NULL;
12359 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12360 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12362 push_gimplify_context ();
12363 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12364 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
12365 &dep_ilist, &dep_olist);
12368 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12369 && gimple_omp_task_taskwait_p (stmt))
12371 if (dep_bind)
12373 gsi_replace (gsi_p, dep_bind, true);
12374 gimple_bind_add_seq (dep_bind, dep_ilist);
12375 gimple_bind_add_stmt (dep_bind, stmt);
12376 gimple_bind_add_seq (dep_bind, dep_olist);
12377 pop_gimplify_context (dep_bind);
12379 return;
12382 if (ctx->srecord_type)
12383 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
12385 gimple_seq tskred_ilist = NULL;
12386 gimple_seq tskred_olist = NULL;
12387 if ((is_task_ctx (ctx)
12388 && gimple_omp_task_taskloop_p (ctx->stmt)
12389 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
12390 OMP_CLAUSE_REDUCTION))
12391 || (is_parallel_ctx (ctx)
12392 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
12393 OMP_CLAUSE__REDUCTEMP_)))
12395 if (dep_bind == NULL)
12397 push_gimplify_context ();
12398 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12400 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
12401 : OMP_PARALLEL,
12402 gimple_omp_taskreg_clauses (ctx->stmt),
12403 &tskred_ilist, &tskred_olist);
12406 push_gimplify_context ();
12408 gimple_seq par_olist = NULL;
12409 gimple_seq par_ilist = NULL;
12410 gimple_seq par_rlist = NULL;
12411 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
12412 lower_omp (&par_body, ctx);
12413 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
12414 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
12416 /* Declare all the variables created by mapping and the variables
12417 declared in the scope of the parallel body. */
12418 record_vars_into (ctx->block_vars, child_fn);
12419 maybe_remove_omp_member_access_dummy_vars (par_bind);
12420 record_vars_into (gimple_bind_vars (par_bind), child_fn);
12422 if (ctx->record_type)
12424 ctx->sender_decl
12425 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12426 : ctx->record_type, ".omp_data_o");
12427 DECL_NAMELESS (ctx->sender_decl) = 1;
12428 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12429 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
12432 gimple_seq olist = NULL;
12433 gimple_seq ilist = NULL;
12434 lower_send_clauses (clauses, &ilist, &olist, ctx);
12435 lower_send_shared_vars (&ilist, &olist, ctx);
12437 if (ctx->record_type)
12439 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12440 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12441 clobber));
12444 /* Once all the expansions are done, sequence all the different
12445 fragments inside gimple_omp_body. */
12447 gimple_seq new_body = NULL;
12449 if (ctx->record_type)
12451 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12452 /* fixup_child_record_type might have changed receiver_decl's type. */
12453 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12454 gimple_seq_add_stmt (&new_body,
12455 gimple_build_assign (ctx->receiver_decl, t));
12458 gimple_seq_add_seq (&new_body, par_ilist);
12459 gimple_seq_add_seq (&new_body, par_body);
12460 gimple_seq_add_seq (&new_body, par_rlist);
12461 if (ctx->cancellable)
12462 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
12463 gimple_seq_add_seq (&new_body, par_olist);
12464 new_body = maybe_catch_exception (new_body);
12465 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
12466 gimple_seq_add_stmt (&new_body,
12467 gimple_build_omp_continue (integer_zero_node,
12468 integer_zero_node));
12469 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12470 gimple_omp_set_body (stmt, new_body);
12472 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
12473 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12474 else
12475 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
12476 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12477 gimple_bind_add_seq (bind, ilist);
12478 gimple_bind_add_stmt (bind, stmt);
12479 gimple_bind_add_seq (bind, olist);
12481 pop_gimplify_context (NULL);
12483 if (dep_bind)
12485 gimple_bind_add_seq (dep_bind, dep_ilist);
12486 gimple_bind_add_seq (dep_bind, tskred_ilist);
12487 gimple_bind_add_stmt (dep_bind, bind);
12488 gimple_bind_add_seq (dep_bind, tskred_olist);
12489 gimple_bind_add_seq (dep_bind, dep_olist);
12490 pop_gimplify_context (dep_bind);
12494 /* Lower the GIMPLE_OMP_TARGET in the current statement
12495 in GSI_P. CTX holds context information for the directive. */
12497 static void
12498 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12500 tree clauses;
12501 tree child_fn, t, c;
12502 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12503 gbind *tgt_bind, *bind, *dep_bind = NULL;
12504 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12505 location_t loc = gimple_location (stmt);
12506 bool offloaded, data_region;
12507 unsigned int map_cnt = 0;
12508 tree in_reduction_clauses = NULL_TREE;
12510 offloaded = is_gimple_omp_offloaded (stmt);
12511 switch (gimple_omp_target_kind (stmt))
12513 case GF_OMP_TARGET_KIND_REGION:
12514 tree *p, *q;
12515 q = &in_reduction_clauses;
12516 for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
12517 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12519 *q = *p;
12520 q = &OMP_CLAUSE_CHAIN (*q);
12521 *p = OMP_CLAUSE_CHAIN (*p);
12523 else
12524 p = &OMP_CLAUSE_CHAIN (*p);
12525 *q = NULL_TREE;
12526 *p = in_reduction_clauses;
12527 /* FALLTHRU */
12528 case GF_OMP_TARGET_KIND_UPDATE:
12529 case GF_OMP_TARGET_KIND_ENTER_DATA:
12530 case GF_OMP_TARGET_KIND_EXIT_DATA:
12531 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12532 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12533 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12534 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12535 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12536 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12537 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12538 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12539 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12540 data_region = false;
12541 break;
12542 case GF_OMP_TARGET_KIND_DATA:
12543 case GF_OMP_TARGET_KIND_OACC_DATA:
12544 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12545 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12546 data_region = true;
12547 break;
12548 default:
12549 gcc_unreachable ();
12552 clauses = gimple_omp_target_clauses (stmt);
12554 gimple_seq dep_ilist = NULL;
12555 gimple_seq dep_olist = NULL;
12556 bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
12557 if (has_depend || in_reduction_clauses)
12559 push_gimplify_context ();
12560 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12561 if (has_depend)
12562 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12563 &dep_ilist, &dep_olist);
12564 if (in_reduction_clauses)
12565 lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
12566 ctx, NULL);
12569 tgt_bind = NULL;
12570 tgt_body = NULL;
12571 if (offloaded)
12573 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12574 tgt_body = gimple_bind_body (tgt_bind);
12576 else if (data_region)
12577 tgt_body = gimple_omp_body (stmt);
12578 child_fn = ctx->cb.dst_fn;
12580 push_gimplify_context ();
12581 fplist = NULL;
12583 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12584 switch (OMP_CLAUSE_CODE (c))
12586 tree var, x;
12588 default:
12589 break;
12590 case OMP_CLAUSE_MAP:
12591 #if CHECKING_P
12592 /* First check what we're prepared to handle in the following. */
12593 switch (OMP_CLAUSE_MAP_KIND (c))
12595 case GOMP_MAP_ALLOC:
12596 case GOMP_MAP_TO:
12597 case GOMP_MAP_FROM:
12598 case GOMP_MAP_TOFROM:
12599 case GOMP_MAP_POINTER:
12600 case GOMP_MAP_TO_PSET:
12601 case GOMP_MAP_DELETE:
12602 case GOMP_MAP_RELEASE:
12603 case GOMP_MAP_ALWAYS_TO:
12604 case GOMP_MAP_ALWAYS_FROM:
12605 case GOMP_MAP_ALWAYS_TOFROM:
12606 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12607 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12608 case GOMP_MAP_STRUCT:
12609 case GOMP_MAP_ALWAYS_POINTER:
12610 case GOMP_MAP_ATTACH:
12611 case GOMP_MAP_DETACH:
12612 break;
12613 case GOMP_MAP_IF_PRESENT:
12614 case GOMP_MAP_FORCE_ALLOC:
12615 case GOMP_MAP_FORCE_TO:
12616 case GOMP_MAP_FORCE_FROM:
12617 case GOMP_MAP_FORCE_TOFROM:
12618 case GOMP_MAP_FORCE_PRESENT:
12619 case GOMP_MAP_FORCE_DEVICEPTR:
12620 case GOMP_MAP_DEVICE_RESIDENT:
12621 case GOMP_MAP_LINK:
12622 case GOMP_MAP_FORCE_DETACH:
12623 gcc_assert (is_gimple_omp_oacc (stmt));
12624 break;
12625 default:
12626 gcc_unreachable ();
12628 #endif
12629 /* FALLTHRU */
12630 case OMP_CLAUSE_TO:
12631 case OMP_CLAUSE_FROM:
12632 oacc_firstprivate:
12633 var = OMP_CLAUSE_DECL (c);
12634 if (!DECL_P (var))
12636 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12637 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12638 && (OMP_CLAUSE_MAP_KIND (c)
12639 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12640 map_cnt++;
12641 continue;
12644 if (DECL_SIZE (var)
12645 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12647 tree var2 = DECL_VALUE_EXPR (var);
12648 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12649 var2 = TREE_OPERAND (var2, 0);
12650 gcc_assert (DECL_P (var2));
12651 var = var2;
12654 if (offloaded
12655 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12656 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12657 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12659 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12661 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12662 && varpool_node::get_create (var)->offloadable)
12663 continue;
12665 tree type = build_pointer_type (TREE_TYPE (var));
12666 tree new_var = lookup_decl (var, ctx);
12667 x = create_tmp_var_raw (type, get_name (new_var));
12668 gimple_add_tmp_var (x);
12669 x = build_simple_mem_ref (x);
12670 SET_DECL_VALUE_EXPR (new_var, x);
12671 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12673 continue;
12676 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12677 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12678 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12679 && is_omp_target (stmt))
12681 gcc_assert (maybe_lookup_field (c, ctx));
12682 map_cnt++;
12683 continue;
12686 if (!maybe_lookup_field (var, ctx))
12687 continue;
12689 /* Don't remap compute constructs' reduction variables, because the
12690 intermediate result must be local to each gang. */
12691 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12692 && is_gimple_omp_oacc (ctx->stmt)
12693 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12695 x = build_receiver_ref (var, true, ctx);
12696 tree new_var = lookup_decl (var, ctx);
12698 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12699 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12700 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12701 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12702 x = build_simple_mem_ref (x);
12703 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12705 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12706 if (omp_privatize_by_reference (new_var)
12707 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12708 || DECL_BY_REFERENCE (var)))
12710 /* Create a local object to hold the instance
12711 value. */
12712 tree type = TREE_TYPE (TREE_TYPE (new_var));
12713 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12714 tree inst = create_tmp_var (type, id);
12715 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12716 x = build_fold_addr_expr (inst);
12718 gimplify_assign (new_var, x, &fplist);
12720 else if (DECL_P (new_var))
12722 SET_DECL_VALUE_EXPR (new_var, x);
12723 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12725 else
12726 gcc_unreachable ();
12728 map_cnt++;
12729 break;
12731 case OMP_CLAUSE_FIRSTPRIVATE:
12732 gcc_checking_assert (offloaded);
12733 if (is_gimple_omp_oacc (ctx->stmt))
12735 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12736 gcc_checking_assert (!is_oacc_kernels (ctx));
12737 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12738 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12740 goto oacc_firstprivate;
12742 map_cnt++;
12743 var = OMP_CLAUSE_DECL (c);
12744 if (!omp_privatize_by_reference (var)
12745 && !is_gimple_reg_type (TREE_TYPE (var)))
12747 tree new_var = lookup_decl (var, ctx);
12748 if (is_variable_sized (var))
12750 tree pvar = DECL_VALUE_EXPR (var);
12751 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12752 pvar = TREE_OPERAND (pvar, 0);
12753 gcc_assert (DECL_P (pvar));
12754 tree new_pvar = lookup_decl (pvar, ctx);
12755 x = build_fold_indirect_ref (new_pvar);
12756 TREE_THIS_NOTRAP (x) = 1;
12758 else
12759 x = build_receiver_ref (var, true, ctx);
12760 SET_DECL_VALUE_EXPR (new_var, x);
12761 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12763 break;
12765 case OMP_CLAUSE_PRIVATE:
12766 gcc_checking_assert (offloaded);
12767 if (is_gimple_omp_oacc (ctx->stmt))
12769 /* No 'private' clauses on OpenACC 'kernels'. */
12770 gcc_checking_assert (!is_oacc_kernels (ctx));
12771 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12772 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12774 break;
12776 var = OMP_CLAUSE_DECL (c);
12777 if (is_variable_sized (var))
12779 tree new_var = lookup_decl (var, ctx);
12780 tree pvar = DECL_VALUE_EXPR (var);
12781 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12782 pvar = TREE_OPERAND (pvar, 0);
12783 gcc_assert (DECL_P (pvar));
12784 tree new_pvar = lookup_decl (pvar, ctx);
12785 x = build_fold_indirect_ref (new_pvar);
12786 TREE_THIS_NOTRAP (x) = 1;
12787 SET_DECL_VALUE_EXPR (new_var, x);
12788 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12790 break;
12792 case OMP_CLAUSE_USE_DEVICE_PTR:
12793 case OMP_CLAUSE_USE_DEVICE_ADDR:
12794 case OMP_CLAUSE_IS_DEVICE_PTR:
12795 var = OMP_CLAUSE_DECL (c);
12796 map_cnt++;
12797 if (is_variable_sized (var))
12799 tree new_var = lookup_decl (var, ctx);
12800 tree pvar = DECL_VALUE_EXPR (var);
12801 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12802 pvar = TREE_OPERAND (pvar, 0);
12803 gcc_assert (DECL_P (pvar));
12804 tree new_pvar = lookup_decl (pvar, ctx);
12805 x = build_fold_indirect_ref (new_pvar);
12806 TREE_THIS_NOTRAP (x) = 1;
12807 SET_DECL_VALUE_EXPR (new_var, x);
12808 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12810 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12811 && !omp_privatize_by_reference (var)
12812 && !omp_is_allocatable_or_ptr (var)
12813 && !lang_hooks.decls.omp_array_data (var, true))
12814 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12816 tree new_var = lookup_decl (var, ctx);
12817 tree type = build_pointer_type (TREE_TYPE (var));
12818 x = create_tmp_var_raw (type, get_name (new_var));
12819 gimple_add_tmp_var (x);
12820 x = build_simple_mem_ref (x);
12821 SET_DECL_VALUE_EXPR (new_var, x);
12822 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12824 else
12826 tree new_var = lookup_decl (var, ctx);
12827 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12828 gimple_add_tmp_var (x);
12829 SET_DECL_VALUE_EXPR (new_var, x);
12830 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12832 break;
12835 if (offloaded)
12837 target_nesting_level++;
12838 lower_omp (&tgt_body, ctx);
12839 target_nesting_level--;
12841 else if (data_region)
12842 lower_omp (&tgt_body, ctx);
12844 if (offloaded)
12846 /* Declare all the variables created by mapping and the variables
12847 declared in the scope of the target body. */
12848 record_vars_into (ctx->block_vars, child_fn);
12849 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
12850 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
12853 olist = NULL;
12854 ilist = NULL;
12855 if (ctx->record_type)
12857 ctx->sender_decl
12858 = create_tmp_var (ctx->record_type, ".omp_data_arr");
12859 DECL_NAMELESS (ctx->sender_decl) = 1;
12860 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12861 t = make_tree_vec (3);
12862 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
12863 TREE_VEC_ELT (t, 1)
12864 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
12865 ".omp_data_sizes");
12866 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
12867 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
12868 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
12869 tree tkind_type = short_unsigned_type_node;
12870 int talign_shift = 8;
12871 TREE_VEC_ELT (t, 2)
12872 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
12873 ".omp_data_kinds");
12874 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
12875 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
12876 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
12877 gimple_omp_target_set_data_arg (stmt, t);
12879 vec<constructor_elt, va_gc> *vsize;
12880 vec<constructor_elt, va_gc> *vkind;
12881 vec_alloc (vsize, map_cnt);
12882 vec_alloc (vkind, map_cnt);
12883 unsigned int map_idx = 0;
12885 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12886 switch (OMP_CLAUSE_CODE (c))
12888 tree ovar, nc, s, purpose, var, x, type;
12889 unsigned int talign;
12891 default:
12892 break;
12894 case OMP_CLAUSE_MAP:
12895 case OMP_CLAUSE_TO:
12896 case OMP_CLAUSE_FROM:
12897 oacc_firstprivate_map:
12898 nc = c;
12899 ovar = OMP_CLAUSE_DECL (c);
12900 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12901 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12902 || (OMP_CLAUSE_MAP_KIND (c)
12903 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12904 break;
12905 if (!DECL_P (ovar))
12907 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12908 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
12910 nc = OMP_CLAUSE_CHAIN (c);
12911 gcc_checking_assert (OMP_CLAUSE_DECL (nc)
12912 == get_base_address (ovar));
12913 ovar = OMP_CLAUSE_DECL (nc);
12915 else
12917 tree x = build_sender_ref (ovar, ctx);
12918 tree v = ovar;
12919 if (in_reduction_clauses
12920 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12921 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
12923 v = unshare_expr (v);
12924 tree *p = &v;
12925 while (handled_component_p (*p)
12926 || TREE_CODE (*p) == INDIRECT_REF
12927 || TREE_CODE (*p) == ADDR_EXPR
12928 || TREE_CODE (*p) == MEM_REF
12929 || TREE_CODE (*p) == NON_LVALUE_EXPR)
12930 p = &TREE_OPERAND (*p, 0);
12931 tree d = *p;
12932 if (is_variable_sized (d))
12934 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
12935 d = DECL_VALUE_EXPR (d);
12936 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
12937 d = TREE_OPERAND (d, 0);
12938 gcc_assert (DECL_P (d));
12940 splay_tree_key key
12941 = (splay_tree_key) &DECL_CONTEXT (d);
12942 tree nd = (tree) splay_tree_lookup (ctx->field_map,
12943 key)->value;
12944 if (d == *p)
12945 *p = nd;
12946 else
12947 *p = build_fold_indirect_ref (nd);
12949 v = build_fold_addr_expr_with_type (v, ptr_type_node);
12950 gimplify_assign (x, v, &ilist);
12951 nc = NULL_TREE;
12954 else
12956 if (DECL_SIZE (ovar)
12957 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
12959 tree ovar2 = DECL_VALUE_EXPR (ovar);
12960 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
12961 ovar2 = TREE_OPERAND (ovar2, 0);
12962 gcc_assert (DECL_P (ovar2));
12963 ovar = ovar2;
12965 if (!maybe_lookup_field (ovar, ctx)
12966 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12967 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12968 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
12969 continue;
12972 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
12973 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
12974 talign = DECL_ALIGN_UNIT (ovar);
12976 var = NULL_TREE;
12977 if (nc)
12979 if (in_reduction_clauses
12980 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12981 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
12983 tree d = ovar;
12984 if (is_variable_sized (d))
12986 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
12987 d = DECL_VALUE_EXPR (d);
12988 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
12989 d = TREE_OPERAND (d, 0);
12990 gcc_assert (DECL_P (d));
12992 splay_tree_key key
12993 = (splay_tree_key) &DECL_CONTEXT (d);
12994 tree nd = (tree) splay_tree_lookup (ctx->field_map,
12995 key)->value;
12996 if (d == ovar)
12997 var = nd;
12998 else
12999 var = build_fold_indirect_ref (nd);
13001 else
13002 var = lookup_decl_in_outer_ctx (ovar, ctx);
13004 if (nc
13005 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13006 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13007 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
13008 && is_omp_target (stmt))
13010 x = build_sender_ref (c, ctx);
13011 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
13013 else if (nc)
13015 x = build_sender_ref (ovar, ctx);
13017 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13018 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
13019 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
13020 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
13022 gcc_assert (offloaded);
13023 tree avar
13024 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
13025 mark_addressable (avar);
13026 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
13027 talign = DECL_ALIGN_UNIT (avar);
13028 avar = build_fold_addr_expr (avar);
13029 gimplify_assign (x, avar, &ilist);
13031 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13033 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
13034 if (!omp_privatize_by_reference (var))
13036 if (is_gimple_reg (var)
13037 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13038 suppress_warning (var);
13039 var = build_fold_addr_expr (var);
13041 else
13042 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13043 gimplify_assign (x, var, &ilist);
13045 else if (is_gimple_reg (var))
13047 gcc_assert (offloaded);
13048 tree avar = create_tmp_var (TREE_TYPE (var));
13049 mark_addressable (avar);
13050 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
13051 if (GOMP_MAP_COPY_TO_P (map_kind)
13052 || map_kind == GOMP_MAP_POINTER
13053 || map_kind == GOMP_MAP_TO_PSET
13054 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13056 /* If we need to initialize a temporary
13057 with VAR because it is not addressable, and
13058 the variable hasn't been initialized yet, then
13059 we'll get a warning for the store to avar.
13060 Don't warn in that case, the mapping might
13061 be implicit. */
13062 suppress_warning (var, OPT_Wuninitialized);
13063 gimplify_assign (avar, var, &ilist);
13065 avar = build_fold_addr_expr (avar);
13066 gimplify_assign (x, avar, &ilist);
13067 if ((GOMP_MAP_COPY_FROM_P (map_kind)
13068 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13069 && !TYPE_READONLY (TREE_TYPE (var)))
13071 x = unshare_expr (x);
13072 x = build_simple_mem_ref (x);
13073 gimplify_assign (var, x, &olist);
13076 else
13078 /* While MAP is handled explicitly by the FE,
13079 for 'target update', only the identified is passed. */
13080 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
13081 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
13082 && (omp_is_allocatable_or_ptr (var)
13083 && omp_check_optional_argument (var, false)))
13084 var = build_fold_indirect_ref (var);
13085 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
13086 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
13087 || (!omp_is_allocatable_or_ptr (var)
13088 && !omp_check_optional_argument (var, false)))
13089 var = build_fold_addr_expr (var);
13090 gimplify_assign (x, var, &ilist);
13093 s = NULL_TREE;
13094 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13096 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13097 s = TREE_TYPE (ovar);
13098 if (TREE_CODE (s) == REFERENCE_TYPE
13099 || omp_check_optional_argument (ovar, false))
13100 s = TREE_TYPE (s);
13101 s = TYPE_SIZE_UNIT (s);
13103 else
13104 s = OMP_CLAUSE_SIZE (c);
13105 if (s == NULL_TREE)
13106 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13107 s = fold_convert (size_type_node, s);
13108 purpose = size_int (map_idx++);
13109 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13110 if (TREE_CODE (s) != INTEGER_CST)
13111 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13113 unsigned HOST_WIDE_INT tkind, tkind_zero;
13114 switch (OMP_CLAUSE_CODE (c))
13116 case OMP_CLAUSE_MAP:
13117 tkind = OMP_CLAUSE_MAP_KIND (c);
13118 tkind_zero = tkind;
13119 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
13120 switch (tkind)
13122 case GOMP_MAP_ALLOC:
13123 case GOMP_MAP_IF_PRESENT:
13124 case GOMP_MAP_TO:
13125 case GOMP_MAP_FROM:
13126 case GOMP_MAP_TOFROM:
13127 case GOMP_MAP_ALWAYS_TO:
13128 case GOMP_MAP_ALWAYS_FROM:
13129 case GOMP_MAP_ALWAYS_TOFROM:
13130 case GOMP_MAP_RELEASE:
13131 case GOMP_MAP_FORCE_TO:
13132 case GOMP_MAP_FORCE_FROM:
13133 case GOMP_MAP_FORCE_TOFROM:
13134 case GOMP_MAP_FORCE_PRESENT:
13135 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
13136 break;
13137 case GOMP_MAP_DELETE:
13138 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
13139 default:
13140 break;
13142 if (tkind_zero != tkind)
13144 if (integer_zerop (s))
13145 tkind = tkind_zero;
13146 else if (integer_nonzerop (s))
13147 tkind_zero = tkind;
13149 break;
13150 case OMP_CLAUSE_FIRSTPRIVATE:
13151 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13152 tkind = GOMP_MAP_TO;
13153 tkind_zero = tkind;
13154 break;
13155 case OMP_CLAUSE_TO:
13156 tkind = GOMP_MAP_TO;
13157 tkind_zero = tkind;
13158 break;
13159 case OMP_CLAUSE_FROM:
13160 tkind = GOMP_MAP_FROM;
13161 tkind_zero = tkind;
13162 break;
13163 default:
13164 gcc_unreachable ();
13166 gcc_checking_assert (tkind
13167 < (HOST_WIDE_INT_C (1U) << talign_shift));
13168 gcc_checking_assert (tkind_zero
13169 < (HOST_WIDE_INT_C (1U) << talign_shift));
13170 talign = ceil_log2 (talign);
13171 tkind |= talign << talign_shift;
13172 tkind_zero |= talign << talign_shift;
13173 gcc_checking_assert (tkind
13174 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13175 gcc_checking_assert (tkind_zero
13176 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13177 if (tkind == tkind_zero)
13178 x = build_int_cstu (tkind_type, tkind);
13179 else
13181 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
13182 x = build3 (COND_EXPR, tkind_type,
13183 fold_build2 (EQ_EXPR, boolean_type_node,
13184 unshare_expr (s), size_zero_node),
13185 build_int_cstu (tkind_type, tkind_zero),
13186 build_int_cstu (tkind_type, tkind));
13188 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
13189 if (nc && nc != c)
13190 c = nc;
13191 break;
13193 case OMP_CLAUSE_FIRSTPRIVATE:
13194 if (is_gimple_omp_oacc (ctx->stmt))
13195 goto oacc_firstprivate_map;
13196 ovar = OMP_CLAUSE_DECL (c);
13197 if (omp_privatize_by_reference (ovar))
13198 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13199 else
13200 talign = DECL_ALIGN_UNIT (ovar);
13201 var = lookup_decl_in_outer_ctx (ovar, ctx);
13202 x = build_sender_ref (ovar, ctx);
13203 tkind = GOMP_MAP_FIRSTPRIVATE;
13204 type = TREE_TYPE (ovar);
13205 if (omp_privatize_by_reference (ovar))
13206 type = TREE_TYPE (type);
13207 if ((INTEGRAL_TYPE_P (type)
13208 && TYPE_PRECISION (type) <= POINTER_SIZE)
13209 || TREE_CODE (type) == POINTER_TYPE)
13211 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13212 tree t = var;
13213 if (omp_privatize_by_reference (var))
13214 t = build_simple_mem_ref (var);
13215 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13216 suppress_warning (var);
13217 if (TREE_CODE (type) != POINTER_TYPE)
13218 t = fold_convert (pointer_sized_int_node, t);
13219 t = fold_convert (TREE_TYPE (x), t);
13220 gimplify_assign (x, t, &ilist);
13222 else if (omp_privatize_by_reference (var))
13223 gimplify_assign (x, var, &ilist);
13224 else if (is_gimple_reg (var))
13226 tree avar = create_tmp_var (TREE_TYPE (var));
13227 mark_addressable (avar);
13228 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13229 suppress_warning (var);
13230 gimplify_assign (avar, var, &ilist);
13231 avar = build_fold_addr_expr (avar);
13232 gimplify_assign (x, avar, &ilist);
13234 else
13236 var = build_fold_addr_expr (var);
13237 gimplify_assign (x, var, &ilist);
13239 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13240 s = size_int (0);
13241 else if (omp_privatize_by_reference (ovar))
13242 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13243 else
13244 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13245 s = fold_convert (size_type_node, s);
13246 purpose = size_int (map_idx++);
13247 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13248 if (TREE_CODE (s) != INTEGER_CST)
13249 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13251 gcc_checking_assert (tkind
13252 < (HOST_WIDE_INT_C (1U) << talign_shift));
13253 talign = ceil_log2 (talign);
13254 tkind |= talign << talign_shift;
13255 gcc_checking_assert (tkind
13256 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13257 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13258 build_int_cstu (tkind_type, tkind));
13259 break;
13261 case OMP_CLAUSE_USE_DEVICE_PTR:
13262 case OMP_CLAUSE_USE_DEVICE_ADDR:
13263 case OMP_CLAUSE_IS_DEVICE_PTR:
13264 ovar = OMP_CLAUSE_DECL (c);
13265 var = lookup_decl_in_outer_ctx (ovar, ctx);
13267 if (lang_hooks.decls.omp_array_data (ovar, true))
13269 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13270 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13271 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13273 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13275 tkind = GOMP_MAP_USE_DEVICE_PTR;
13276 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13278 else
13280 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13281 x = build_sender_ref (ovar, ctx);
13284 if (is_gimple_omp_oacc (ctx->stmt))
13286 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13288 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13289 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13292 type = TREE_TYPE (ovar);
13293 if (lang_hooks.decls.omp_array_data (ovar, true))
13294 var = lang_hooks.decls.omp_array_data (ovar, false);
13295 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13296 && !omp_privatize_by_reference (ovar)
13297 && !omp_is_allocatable_or_ptr (ovar))
13298 || TREE_CODE (type) == ARRAY_TYPE)
13299 var = build_fold_addr_expr (var);
13300 else
13302 if (omp_privatize_by_reference (ovar)
13303 || omp_check_optional_argument (ovar, false)
13304 || omp_is_allocatable_or_ptr (ovar))
13306 type = TREE_TYPE (type);
13307 if (POINTER_TYPE_P (type)
13308 && TREE_CODE (type) != ARRAY_TYPE
13309 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13310 && !omp_is_allocatable_or_ptr (ovar))
13311 || (omp_privatize_by_reference (ovar)
13312 && omp_is_allocatable_or_ptr (ovar))))
13313 var = build_simple_mem_ref (var);
13314 var = fold_convert (TREE_TYPE (x), var);
13317 tree present;
13318 present = omp_check_optional_argument (ovar, true);
13319 if (present)
13321 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13322 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13323 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13324 tree new_x = unshare_expr (x);
13325 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13326 fb_rvalue);
13327 gcond *cond = gimple_build_cond_from_tree (present,
13328 notnull_label,
13329 null_label);
13330 gimple_seq_add_stmt (&ilist, cond);
13331 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
13332 gimplify_assign (new_x, null_pointer_node, &ilist);
13333 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
13334 gimple_seq_add_stmt (&ilist,
13335 gimple_build_label (notnull_label));
13336 gimplify_assign (x, var, &ilist);
13337 gimple_seq_add_stmt (&ilist,
13338 gimple_build_label (opt_arg_label));
13340 else
13341 gimplify_assign (x, var, &ilist);
13342 s = size_int (0);
13343 purpose = size_int (map_idx++);
13344 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13345 gcc_checking_assert (tkind
13346 < (HOST_WIDE_INT_C (1U) << talign_shift));
13347 gcc_checking_assert (tkind
13348 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13349 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13350 build_int_cstu (tkind_type, tkind));
13351 break;
13354 gcc_assert (map_idx == map_cnt);
13356 DECL_INITIAL (TREE_VEC_ELT (t, 1))
13357 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13358 DECL_INITIAL (TREE_VEC_ELT (t, 2))
13359 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
13360 for (int i = 1; i <= 2; i++)
13361 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
13363 gimple_seq initlist = NULL;
13364 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
13365 TREE_VEC_ELT (t, i)),
13366 &initlist, true, NULL_TREE);
13367 gimple_seq_add_seq (&ilist, initlist);
13369 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
13370 gimple_seq_add_stmt (&olist,
13371 gimple_build_assign (TREE_VEC_ELT (t, i),
13372 clobber));
13374 else if (omp_maybe_offloaded_ctx (ctx->outer))
13376 tree id = get_identifier ("omp declare target");
13377 tree decl = TREE_VEC_ELT (t, i);
13378 DECL_ATTRIBUTES (decl)
13379 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13380 varpool_node *node = varpool_node::get (decl);
13381 if (node)
13383 node->offloadable = 1;
13384 if (ENABLE_OFFLOADING)
13386 g->have_offload = true;
13387 vec_safe_push (offload_vars, t);
13392 tree clobber = build_clobber (ctx->record_type);
13393 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13394 clobber));
13397 /* Once all the expansions are done, sequence all the different
13398 fragments inside gimple_omp_body. */
13400 new_body = NULL;
13402 if (offloaded
13403 && ctx->record_type)
13405 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13406 /* fixup_child_record_type might have changed receiver_decl's type. */
13407 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13408 gimple_seq_add_stmt (&new_body,
13409 gimple_build_assign (ctx->receiver_decl, t));
13411 gimple_seq_add_seq (&new_body, fplist);
13413 if (offloaded || data_region)
13415 tree prev = NULL_TREE;
13416 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13417 switch (OMP_CLAUSE_CODE (c))
13419 tree var, x;
13420 default:
13421 break;
13422 case OMP_CLAUSE_FIRSTPRIVATE:
13423 if (is_gimple_omp_oacc (ctx->stmt))
13424 break;
13425 var = OMP_CLAUSE_DECL (c);
13426 if (omp_privatize_by_reference (var)
13427 || is_gimple_reg_type (TREE_TYPE (var)))
13429 tree new_var = lookup_decl (var, ctx);
13430 tree type;
13431 type = TREE_TYPE (var);
13432 if (omp_privatize_by_reference (var))
13433 type = TREE_TYPE (type);
13434 if ((INTEGRAL_TYPE_P (type)
13435 && TYPE_PRECISION (type) <= POINTER_SIZE)
13436 || TREE_CODE (type) == POINTER_TYPE)
13438 x = build_receiver_ref (var, false, ctx);
13439 if (TREE_CODE (type) != POINTER_TYPE)
13440 x = fold_convert (pointer_sized_int_node, x);
13441 x = fold_convert (type, x);
13442 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13443 fb_rvalue);
13444 if (omp_privatize_by_reference (var))
13446 tree v = create_tmp_var_raw (type, get_name (var));
13447 gimple_add_tmp_var (v);
13448 TREE_ADDRESSABLE (v) = 1;
13449 gimple_seq_add_stmt (&new_body,
13450 gimple_build_assign (v, x));
13451 x = build_fold_addr_expr (v);
13453 gimple_seq_add_stmt (&new_body,
13454 gimple_build_assign (new_var, x));
13456 else
13458 bool by_ref = !omp_privatize_by_reference (var);
13459 x = build_receiver_ref (var, by_ref, ctx);
13460 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13461 fb_rvalue);
13462 gimple_seq_add_stmt (&new_body,
13463 gimple_build_assign (new_var, x));
13466 else if (is_variable_sized (var))
13468 tree pvar = DECL_VALUE_EXPR (var);
13469 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13470 pvar = TREE_OPERAND (pvar, 0);
13471 gcc_assert (DECL_P (pvar));
13472 tree new_var = lookup_decl (pvar, ctx);
13473 x = build_receiver_ref (var, false, ctx);
13474 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13475 gimple_seq_add_stmt (&new_body,
13476 gimple_build_assign (new_var, x));
13478 break;
13479 case OMP_CLAUSE_PRIVATE:
13480 if (is_gimple_omp_oacc (ctx->stmt))
13481 break;
13482 var = OMP_CLAUSE_DECL (c);
13483 if (omp_privatize_by_reference (var))
13485 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13486 tree new_var = lookup_decl (var, ctx);
13487 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13488 if (TREE_CONSTANT (x))
13490 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13491 get_name (var));
13492 gimple_add_tmp_var (x);
13493 TREE_ADDRESSABLE (x) = 1;
13494 x = build_fold_addr_expr_loc (clause_loc, x);
13496 else
13497 break;
13499 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13500 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13501 gimple_seq_add_stmt (&new_body,
13502 gimple_build_assign (new_var, x));
13504 break;
13505 case OMP_CLAUSE_USE_DEVICE_PTR:
13506 case OMP_CLAUSE_USE_DEVICE_ADDR:
13507 case OMP_CLAUSE_IS_DEVICE_PTR:
13508 tree new_var;
13509 gimple_seq assign_body;
13510 bool is_array_data;
13511 bool do_optional_check;
13512 assign_body = NULL;
13513 do_optional_check = false;
13514 var = OMP_CLAUSE_DECL (c);
13515 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13517 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13518 x = build_sender_ref (is_array_data
13519 ? (splay_tree_key) &DECL_NAME (var)
13520 : (splay_tree_key) &DECL_UID (var), ctx);
13521 else
13522 x = build_receiver_ref (var, false, ctx);
13524 if (is_array_data)
13526 bool is_ref = omp_privatize_by_reference (var);
13527 do_optional_check = true;
13528 /* First, we copy the descriptor data from the host; then
13529 we update its data to point to the target address. */
13530 new_var = lookup_decl (var, ctx);
13531 new_var = DECL_VALUE_EXPR (new_var);
13532 tree v = new_var;
13534 if (is_ref)
13536 var = build_fold_indirect_ref (var);
13537 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
13538 fb_rvalue);
13539 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
13540 gimple_add_tmp_var (v);
13541 TREE_ADDRESSABLE (v) = 1;
13542 gimple_seq_add_stmt (&assign_body,
13543 gimple_build_assign (v, var));
13544 tree rhs = build_fold_addr_expr (v);
13545 gimple_seq_add_stmt (&assign_body,
13546 gimple_build_assign (new_var, rhs));
13548 else
13549 gimple_seq_add_stmt (&assign_body,
13550 gimple_build_assign (new_var, var));
13552 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13553 gcc_assert (v2);
13554 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13555 gimple_seq_add_stmt (&assign_body,
13556 gimple_build_assign (v2, x));
13558 else if (is_variable_sized (var))
13560 tree pvar = DECL_VALUE_EXPR (var);
13561 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13562 pvar = TREE_OPERAND (pvar, 0);
13563 gcc_assert (DECL_P (pvar));
13564 new_var = lookup_decl (pvar, ctx);
13565 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13566 gimple_seq_add_stmt (&assign_body,
13567 gimple_build_assign (new_var, x));
13569 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13570 && !omp_privatize_by_reference (var)
13571 && !omp_is_allocatable_or_ptr (var))
13572 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13574 new_var = lookup_decl (var, ctx);
13575 new_var = DECL_VALUE_EXPR (new_var);
13576 gcc_assert (TREE_CODE (new_var) == MEM_REF);
13577 new_var = TREE_OPERAND (new_var, 0);
13578 gcc_assert (DECL_P (new_var));
13579 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13580 gimple_seq_add_stmt (&assign_body,
13581 gimple_build_assign (new_var, x));
13583 else
13585 tree type = TREE_TYPE (var);
13586 new_var = lookup_decl (var, ctx);
13587 if (omp_privatize_by_reference (var))
13589 type = TREE_TYPE (type);
13590 if (POINTER_TYPE_P (type)
13591 && TREE_CODE (type) != ARRAY_TYPE
13592 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13593 || (omp_privatize_by_reference (var)
13594 && omp_is_allocatable_or_ptr (var))))
13596 tree v = create_tmp_var_raw (type, get_name (var));
13597 gimple_add_tmp_var (v);
13598 TREE_ADDRESSABLE (v) = 1;
13599 x = fold_convert (type, x);
13600 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13601 fb_rvalue);
13602 gimple_seq_add_stmt (&assign_body,
13603 gimple_build_assign (v, x));
13604 x = build_fold_addr_expr (v);
13605 do_optional_check = true;
13608 new_var = DECL_VALUE_EXPR (new_var);
13609 x = fold_convert (TREE_TYPE (new_var), x);
13610 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13611 gimple_seq_add_stmt (&assign_body,
13612 gimple_build_assign (new_var, x));
13614 tree present;
13615 present = (do_optional_check
13616 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
13617 : NULL_TREE);
13618 if (present)
13620 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13621 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13622 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13623 glabel *null_glabel = gimple_build_label (null_label);
13624 glabel *notnull_glabel = gimple_build_label (notnull_label);
13625 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
13626 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13627 fb_rvalue);
13628 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13629 fb_rvalue);
13630 gcond *cond = gimple_build_cond_from_tree (present,
13631 notnull_label,
13632 null_label);
13633 gimple_seq_add_stmt (&new_body, cond);
13634 gimple_seq_add_stmt (&new_body, null_glabel);
13635 gimplify_assign (new_var, null_pointer_node, &new_body);
13636 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13637 gimple_seq_add_stmt (&new_body, notnull_glabel);
13638 gimple_seq_add_seq (&new_body, assign_body);
13639 gimple_seq_add_stmt (&new_body,
13640 gimple_build_label (opt_arg_label));
13642 else
13643 gimple_seq_add_seq (&new_body, assign_body);
13644 break;
13646 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13647 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13648 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13649 or references to VLAs. */
13650 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13651 switch (OMP_CLAUSE_CODE (c))
13653 tree var;
13654 default:
13655 break;
13656 case OMP_CLAUSE_MAP:
13657 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13658 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13660 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13661 poly_int64 offset = 0;
13662 gcc_assert (prev);
13663 var = OMP_CLAUSE_DECL (c);
13664 if (DECL_P (var)
13665 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
13666 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
13667 ctx))
13668 && varpool_node::get_create (var)->offloadable)
13669 break;
13670 if (TREE_CODE (var) == INDIRECT_REF
13671 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
13672 var = TREE_OPERAND (var, 0);
13673 if (TREE_CODE (var) == COMPONENT_REF)
13675 var = get_addr_base_and_unit_offset (var, &offset);
13676 gcc_assert (var != NULL_TREE && DECL_P (var));
13678 else if (DECL_SIZE (var)
13679 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
13681 tree var2 = DECL_VALUE_EXPR (var);
13682 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
13683 var2 = TREE_OPERAND (var2, 0);
13684 gcc_assert (DECL_P (var2));
13685 var = var2;
13687 tree new_var = lookup_decl (var, ctx), x;
13688 tree type = TREE_TYPE (new_var);
13689 bool is_ref;
13690 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
13691 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13692 == COMPONENT_REF))
13694 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
13695 is_ref = true;
13696 new_var = build2 (MEM_REF, type,
13697 build_fold_addr_expr (new_var),
13698 build_int_cst (build_pointer_type (type),
13699 offset));
13701 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
13703 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
13704 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
13705 new_var = build2 (MEM_REF, type,
13706 build_fold_addr_expr (new_var),
13707 build_int_cst (build_pointer_type (type),
13708 offset));
13710 else
13711 is_ref = omp_privatize_by_reference (var);
13712 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13713 is_ref = false;
13714 bool ref_to_array = false;
13715 if (is_ref)
13717 type = TREE_TYPE (type);
13718 if (TREE_CODE (type) == ARRAY_TYPE)
13720 type = build_pointer_type (type);
13721 ref_to_array = true;
13724 else if (TREE_CODE (type) == ARRAY_TYPE)
13726 tree decl2 = DECL_VALUE_EXPR (new_var);
13727 gcc_assert (TREE_CODE (decl2) == MEM_REF);
13728 decl2 = TREE_OPERAND (decl2, 0);
13729 gcc_assert (DECL_P (decl2));
13730 new_var = decl2;
13731 type = TREE_TYPE (new_var);
13733 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
13734 x = fold_convert_loc (clause_loc, type, x);
13735 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
13737 tree bias = OMP_CLAUSE_SIZE (c);
13738 if (DECL_P (bias))
13739 bias = lookup_decl (bias, ctx);
13740 bias = fold_convert_loc (clause_loc, sizetype, bias);
13741 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
13742 bias);
13743 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
13744 TREE_TYPE (x), x, bias);
13746 if (ref_to_array)
13747 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13748 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13749 if (is_ref && !ref_to_array)
13751 tree t = create_tmp_var_raw (type, get_name (var));
13752 gimple_add_tmp_var (t);
13753 TREE_ADDRESSABLE (t) = 1;
13754 gimple_seq_add_stmt (&new_body,
13755 gimple_build_assign (t, x));
13756 x = build_fold_addr_expr_loc (clause_loc, t);
13758 gimple_seq_add_stmt (&new_body,
13759 gimple_build_assign (new_var, x));
13760 prev = NULL_TREE;
13762 else if (OMP_CLAUSE_CHAIN (c)
13763 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
13764 == OMP_CLAUSE_MAP
13765 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13766 == GOMP_MAP_FIRSTPRIVATE_POINTER
13767 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13768 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13769 prev = c;
13770 break;
13771 case OMP_CLAUSE_PRIVATE:
13772 var = OMP_CLAUSE_DECL (c);
13773 if (is_variable_sized (var))
13775 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13776 tree new_var = lookup_decl (var, ctx);
13777 tree pvar = DECL_VALUE_EXPR (var);
13778 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13779 pvar = TREE_OPERAND (pvar, 0);
13780 gcc_assert (DECL_P (pvar));
13781 tree new_pvar = lookup_decl (pvar, ctx);
13782 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13783 tree al = size_int (DECL_ALIGN (var));
13784 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
13785 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13786 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
13787 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13788 gimple_seq_add_stmt (&new_body,
13789 gimple_build_assign (new_pvar, x));
13791 else if (omp_privatize_by_reference (var)
13792 && !is_gimple_omp_oacc (ctx->stmt))
13794 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13795 tree new_var = lookup_decl (var, ctx);
13796 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13797 if (TREE_CONSTANT (x))
13798 break;
13799 else
13801 tree atmp
13802 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13803 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
13804 tree al = size_int (TYPE_ALIGN (rtype));
13805 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13808 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13809 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13810 gimple_seq_add_stmt (&new_body,
13811 gimple_build_assign (new_var, x));
13813 break;
13816 gimple_seq fork_seq = NULL;
13817 gimple_seq join_seq = NULL;
13819 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
13821 /* If there are reductions on the offloaded region itself, treat
13822 them as a dummy GANG loop. */
13823 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
13825 gcall *private_marker = lower_oacc_private_marker (ctx);
13827 if (private_marker)
13828 gimple_call_set_arg (private_marker, 2, level);
13830 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
13831 false, NULL, private_marker, NULL, &fork_seq,
13832 &join_seq, ctx);
13835 gimple_seq_add_seq (&new_body, fork_seq);
13836 gimple_seq_add_seq (&new_body, tgt_body);
13837 gimple_seq_add_seq (&new_body, join_seq);
13839 if (offloaded)
13841 new_body = maybe_catch_exception (new_body);
13842 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
13844 gimple_omp_set_body (stmt, new_body);
13847 bind = gimple_build_bind (NULL, NULL,
13848 tgt_bind ? gimple_bind_block (tgt_bind)
13849 : NULL_TREE);
13850 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
13851 gimple_bind_add_seq (bind, ilist);
13852 gimple_bind_add_stmt (bind, stmt);
13853 gimple_bind_add_seq (bind, olist);
13855 pop_gimplify_context (NULL);
13857 if (dep_bind)
13859 gimple_bind_add_seq (dep_bind, dep_ilist);
13860 gimple_bind_add_stmt (dep_bind, bind);
13861 gimple_bind_add_seq (dep_bind, dep_olist);
13862 pop_gimplify_context (dep_bind);
13866 /* Expand code for an OpenMP teams directive. */
13868 static void
13869 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
13871 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
13872 push_gimplify_context ();
13874 tree block = make_node (BLOCK);
13875 gbind *bind = gimple_build_bind (NULL, NULL, block);
13876 gsi_replace (gsi_p, bind, true);
13877 gimple_seq bind_body = NULL;
13878 gimple_seq dlist = NULL;
13879 gimple_seq olist = NULL;
13881 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13882 OMP_CLAUSE_NUM_TEAMS);
13883 if (num_teams == NULL_TREE)
13884 num_teams = build_int_cst (unsigned_type_node, 0);
13885 else
13887 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
13888 num_teams = fold_convert (unsigned_type_node, num_teams);
13889 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
13891 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13892 OMP_CLAUSE_THREAD_LIMIT);
13893 if (thread_limit == NULL_TREE)
13894 thread_limit = build_int_cst (unsigned_type_node, 0);
13895 else
13897 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
13898 thread_limit = fold_convert (unsigned_type_node, thread_limit);
13899 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
13900 fb_rvalue);
13903 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
13904 &bind_body, &dlist, ctx, NULL);
13905 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
13906 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
13907 NULL, ctx);
13908 gimple_seq_add_stmt (&bind_body, teams_stmt);
13910 location_t loc = gimple_location (teams_stmt);
13911 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
13912 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
13913 gimple_set_location (call, loc);
13914 gimple_seq_add_stmt (&bind_body, call);
13916 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
13917 gimple_omp_set_body (teams_stmt, NULL);
13918 gimple_seq_add_seq (&bind_body, olist);
13919 gimple_seq_add_seq (&bind_body, dlist);
13920 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
13921 gimple_bind_set_body (bind, bind_body);
13923 pop_gimplify_context (bind);
13925 gimple_bind_append_vars (bind, ctx->block_vars);
13926 BLOCK_VARS (block) = ctx->block_vars;
13927 if (BLOCK_VARS (block))
13928 TREE_USED (block) = 1;
13931 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
13932 regimplified. If DATA is non-NULL, lower_omp_1 is outside
13933 of OMP context, but with task_shared_vars set. */
13935 static tree
13936 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
13937 void *data)
13939 tree t = *tp;
13941 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
13942 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
13943 && data == NULL
13944 && DECL_HAS_VALUE_EXPR_P (t))
13945 return t;
13947 if (task_shared_vars
13948 && DECL_P (t)
13949 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
13950 return t;
13952 /* If a global variable has been privatized, TREE_CONSTANT on
13953 ADDR_EXPR might be wrong. */
13954 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
13955 recompute_tree_invariant_for_addr_expr (t);
13957 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
13958 return NULL_TREE;
13961 /* Data to be communicated between lower_omp_regimplify_operands and
13962 lower_omp_regimplify_operands_p. */
13964 struct lower_omp_regimplify_operands_data
13966 omp_context *ctx;
13967 vec<tree> *decls;
13970 /* Helper function for lower_omp_regimplify_operands. Find
13971 omp_member_access_dummy_var vars and adjust temporarily their
13972 DECL_VALUE_EXPRs if needed. */
13974 static tree
13975 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
13976 void *data)
13978 tree t = omp_member_access_dummy_var (*tp);
13979 if (t)
13981 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
13982 lower_omp_regimplify_operands_data *ldata
13983 = (lower_omp_regimplify_operands_data *) wi->info;
13984 tree o = maybe_lookup_decl (t, ldata->ctx);
13985 if (o != t)
13987 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
13988 ldata->decls->safe_push (*tp);
13989 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
13990 SET_DECL_VALUE_EXPR (*tp, v);
13993 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
13994 return NULL_TREE;
13997 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
13998 of omp_member_access_dummy_var vars during regimplification. */
14000 static void
14001 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
14002 gimple_stmt_iterator *gsi_p)
14004 auto_vec<tree, 10> decls;
14005 if (ctx)
14007 struct walk_stmt_info wi;
14008 memset (&wi, '\0', sizeof (wi));
14009 struct lower_omp_regimplify_operands_data data;
14010 data.ctx = ctx;
14011 data.decls = &decls;
14012 wi.info = &data;
14013 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
14015 gimple_regimplify_operands (stmt, gsi_p);
14016 while (!decls.is_empty ())
14018 tree t = decls.pop ();
14019 tree v = decls.pop ();
14020 SET_DECL_VALUE_EXPR (t, v);
14024 static void
14025 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14027 gimple *stmt = gsi_stmt (*gsi_p);
14028 struct walk_stmt_info wi;
14029 gcall *call_stmt;
14031 if (gimple_has_location (stmt))
14032 input_location = gimple_location (stmt);
14034 if (task_shared_vars)
14035 memset (&wi, '\0', sizeof (wi));
14037 /* If we have issued syntax errors, avoid doing any heavy lifting.
14038 Just replace the OMP directives with a NOP to avoid
14039 confusing RTL expansion. */
14040 if (seen_error () && is_gimple_omp (stmt))
14042 gsi_replace (gsi_p, gimple_build_nop (), true);
14043 return;
14046 switch (gimple_code (stmt))
14048 case GIMPLE_COND:
14050 gcond *cond_stmt = as_a <gcond *> (stmt);
14051 if ((ctx || task_shared_vars)
14052 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
14053 lower_omp_regimplify_p,
14054 ctx ? NULL : &wi, NULL)
14055 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
14056 lower_omp_regimplify_p,
14057 ctx ? NULL : &wi, NULL)))
14058 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
14060 break;
14061 case GIMPLE_CATCH:
14062 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
14063 break;
14064 case GIMPLE_EH_FILTER:
14065 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
14066 break;
14067 case GIMPLE_TRY:
14068 lower_omp (gimple_try_eval_ptr (stmt), ctx);
14069 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
14070 break;
14071 case GIMPLE_TRANSACTION:
14072 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
14073 ctx);
14074 break;
14075 case GIMPLE_BIND:
14076 if (ctx && is_gimple_omp_oacc (ctx->stmt))
14078 tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
14079 oacc_privatization_scan_decl_chain (ctx, vars);
14081 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
14082 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
14083 break;
14084 case GIMPLE_OMP_PARALLEL:
14085 case GIMPLE_OMP_TASK:
14086 ctx = maybe_lookup_ctx (stmt);
14087 gcc_assert (ctx);
14088 if (ctx->cancellable)
14089 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14090 lower_omp_taskreg (gsi_p, ctx);
14091 break;
14092 case GIMPLE_OMP_FOR:
14093 ctx = maybe_lookup_ctx (stmt);
14094 gcc_assert (ctx);
14095 if (ctx->cancellable)
14096 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14097 lower_omp_for (gsi_p, ctx);
14098 break;
14099 case GIMPLE_OMP_SECTIONS:
14100 ctx = maybe_lookup_ctx (stmt);
14101 gcc_assert (ctx);
14102 if (ctx->cancellable)
14103 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14104 lower_omp_sections (gsi_p, ctx);
14105 break;
14106 case GIMPLE_OMP_SCOPE:
14107 ctx = maybe_lookup_ctx (stmt);
14108 gcc_assert (ctx);
14109 lower_omp_scope (gsi_p, ctx);
14110 break;
14111 case GIMPLE_OMP_SINGLE:
14112 ctx = maybe_lookup_ctx (stmt);
14113 gcc_assert (ctx);
14114 lower_omp_single (gsi_p, ctx);
14115 break;
14116 case GIMPLE_OMP_MASTER:
14117 case GIMPLE_OMP_MASKED:
14118 ctx = maybe_lookup_ctx (stmt);
14119 gcc_assert (ctx);
14120 lower_omp_master (gsi_p, ctx);
14121 break;
14122 case GIMPLE_OMP_TASKGROUP:
14123 ctx = maybe_lookup_ctx (stmt);
14124 gcc_assert (ctx);
14125 lower_omp_taskgroup (gsi_p, ctx);
14126 break;
14127 case GIMPLE_OMP_ORDERED:
14128 ctx = maybe_lookup_ctx (stmt);
14129 gcc_assert (ctx);
14130 lower_omp_ordered (gsi_p, ctx);
14131 break;
14132 case GIMPLE_OMP_SCAN:
14133 ctx = maybe_lookup_ctx (stmt);
14134 gcc_assert (ctx);
14135 lower_omp_scan (gsi_p, ctx);
14136 break;
14137 case GIMPLE_OMP_CRITICAL:
14138 ctx = maybe_lookup_ctx (stmt);
14139 gcc_assert (ctx);
14140 lower_omp_critical (gsi_p, ctx);
14141 break;
14142 case GIMPLE_OMP_ATOMIC_LOAD:
14143 if ((ctx || task_shared_vars)
14144 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14145 as_a <gomp_atomic_load *> (stmt)),
14146 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
14147 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14148 break;
14149 case GIMPLE_OMP_TARGET:
14150 ctx = maybe_lookup_ctx (stmt);
14151 gcc_assert (ctx);
14152 lower_omp_target (gsi_p, ctx);
14153 break;
14154 case GIMPLE_OMP_TEAMS:
14155 ctx = maybe_lookup_ctx (stmt);
14156 gcc_assert (ctx);
14157 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
14158 lower_omp_taskreg (gsi_p, ctx);
14159 else
14160 lower_omp_teams (gsi_p, ctx);
14161 break;
14162 case GIMPLE_CALL:
14163 tree fndecl;
14164 call_stmt = as_a <gcall *> (stmt);
14165 fndecl = gimple_call_fndecl (call_stmt);
14166 if (fndecl
14167 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14168 switch (DECL_FUNCTION_CODE (fndecl))
14170 case BUILT_IN_GOMP_BARRIER:
14171 if (ctx == NULL)
14172 break;
14173 /* FALLTHRU */
14174 case BUILT_IN_GOMP_CANCEL:
14175 case BUILT_IN_GOMP_CANCELLATION_POINT:
14176 omp_context *cctx;
14177 cctx = ctx;
14178 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
14179 cctx = cctx->outer;
14180 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
14181 if (!cctx->cancellable)
14183 if (DECL_FUNCTION_CODE (fndecl)
14184 == BUILT_IN_GOMP_CANCELLATION_POINT)
14186 stmt = gimple_build_nop ();
14187 gsi_replace (gsi_p, stmt, false);
14189 break;
14191 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
14193 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
14194 gimple_call_set_fndecl (call_stmt, fndecl);
14195 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
14197 tree lhs;
14198 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
14199 gimple_call_set_lhs (call_stmt, lhs);
14200 tree fallthru_label;
14201 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
14202 gimple *g;
14203 g = gimple_build_label (fallthru_label);
14204 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14205 g = gimple_build_cond (NE_EXPR, lhs,
14206 fold_convert (TREE_TYPE (lhs),
14207 boolean_false_node),
14208 cctx->cancel_label, fallthru_label);
14209 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14210 break;
14211 default:
14212 break;
14214 goto regimplify;
14216 case GIMPLE_ASSIGN:
14217 for (omp_context *up = ctx; up; up = up->outer)
14219 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
14220 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
14221 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
14222 || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
14223 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
14224 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
14225 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
14226 && (gimple_omp_target_kind (up->stmt)
14227 == GF_OMP_TARGET_KIND_DATA)))
14228 continue;
14229 else if (!up->lastprivate_conditional_map)
14230 break;
14231 tree lhs = get_base_address (gimple_assign_lhs (stmt));
14232 if (TREE_CODE (lhs) == MEM_REF
14233 && DECL_P (TREE_OPERAND (lhs, 0))
14234 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
14235 0))) == REFERENCE_TYPE)
14236 lhs = TREE_OPERAND (lhs, 0);
14237 if (DECL_P (lhs))
14238 if (tree *v = up->lastprivate_conditional_map->get (lhs))
14240 tree clauses;
14241 if (up->combined_into_simd_safelen1)
14243 up = up->outer;
14244 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
14245 up = up->outer;
14247 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
14248 clauses = gimple_omp_for_clauses (up->stmt);
14249 else
14250 clauses = gimple_omp_sections_clauses (up->stmt);
14251 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
14252 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
14253 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14254 OMP_CLAUSE__CONDTEMP_);
14255 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
14256 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
14257 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14260 /* FALLTHRU */
14262 default:
14263 regimplify:
14264 if ((ctx || task_shared_vars)
14265 && walk_gimple_op (stmt, lower_omp_regimplify_p,
14266 ctx ? NULL : &wi))
14268 /* Just remove clobbers, this should happen only if we have
14269 "privatized" local addressable variables in SIMD regions,
14270 the clobber isn't needed in that case and gimplifying address
14271 of the ARRAY_REF into a pointer and creating MEM_REF based
14272 clobber would create worse code than we get with the clobber
14273 dropped. */
14274 if (gimple_clobber_p (stmt))
14276 gsi_replace (gsi_p, gimple_build_nop (), true);
14277 break;
14279 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14281 break;
14285 static void
14286 lower_omp (gimple_seq *body, omp_context *ctx)
14288 location_t saved_location = input_location;
14289 gimple_stmt_iterator gsi;
14290 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14291 lower_omp_1 (&gsi, ctx);
14292 /* During gimplification, we haven't folded statments inside offloading
14293 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
14294 if (target_nesting_level || taskreg_nesting_level)
14295 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14296 fold_stmt (&gsi);
14297 input_location = saved_location;
14300 /* Main entry point. */
14302 static unsigned int
14303 execute_lower_omp (void)
14305 gimple_seq body;
14306 int i;
14307 omp_context *ctx;
14309 /* This pass always runs, to provide PROP_gimple_lomp.
14310 But often, there is nothing to do. */
14311 if (flag_openacc == 0 && flag_openmp == 0
14312 && flag_openmp_simd == 0)
14313 return 0;
14315 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
14316 delete_omp_context);
14318 body = gimple_body (current_function_decl);
14320 scan_omp (&body, NULL);
14321 gcc_assert (taskreg_nesting_level == 0);
14322 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
14323 finish_taskreg_scan (ctx);
14324 taskreg_contexts.release ();
14326 if (all_contexts->root)
14328 if (task_shared_vars)
14329 push_gimplify_context ();
14330 lower_omp (&body, NULL);
14331 if (task_shared_vars)
14332 pop_gimplify_context (NULL);
14335 if (all_contexts)
14337 splay_tree_delete (all_contexts);
14338 all_contexts = NULL;
14340 BITMAP_FREE (task_shared_vars);
14341 BITMAP_FREE (global_nonaddressable_vars);
14343 /* If current function is a method, remove artificial dummy VAR_DECL created
14344 for non-static data member privatization, they aren't needed for
14345 debuginfo nor anything else, have been already replaced everywhere in the
14346 IL and cause problems with LTO. */
14347 if (DECL_ARGUMENTS (current_function_decl)
14348 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
14349 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
14350 == POINTER_TYPE))
14351 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
14352 return 0;
14355 namespace {
14357 const pass_data pass_data_lower_omp =
14359 GIMPLE_PASS, /* type */
14360 "omplower", /* name */
14361 OPTGROUP_OMP, /* optinfo_flags */
14362 TV_NONE, /* tv_id */
14363 PROP_gimple_any, /* properties_required */
14364 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
14365 0, /* properties_destroyed */
14366 0, /* todo_flags_start */
14367 0, /* todo_flags_finish */
14370 class pass_lower_omp : public gimple_opt_pass
14372 public:
14373 pass_lower_omp (gcc::context *ctxt)
14374 : gimple_opt_pass (pass_data_lower_omp, ctxt)
14377 /* opt_pass methods: */
14378 virtual unsigned int execute (function *) { return execute_lower_omp (); }
14380 }; // class pass_lower_omp
14382 } // anon namespace
14384 gimple_opt_pass *
14385 make_pass_lower_omp (gcc::context *ctxt)
14387 return new pass_lower_omp (ctxt);
14390 /* The following is a utility to diagnose structured block violations.
14391 It is not part of the "omplower" pass, as that's invoked too late. It
14392 should be invoked by the respective front ends after gimplification. */
14394 static splay_tree all_labels;
14396 /* Check for mismatched contexts and generate an error if needed. Return
14397 true if an error is detected. */
14399 static bool
14400 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
14401 gimple *branch_ctx, gimple *label_ctx)
14403 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
14404 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
14406 if (label_ctx == branch_ctx)
14407 return false;
14409 const char* kind = NULL;
14411 if (flag_openacc)
14413 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
14414 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
14416 gcc_checking_assert (kind == NULL);
14417 kind = "OpenACC";
14420 if (kind == NULL)
14422 gcc_checking_assert (flag_openmp || flag_openmp_simd);
14423 kind = "OpenMP";
14426 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14427 so we could traverse it and issue a correct "exit" or "enter" error
14428 message upon a structured block violation.
14430 We built the context by building a list with tree_cons'ing, but there is
14431 no easy counterpart in gimple tuples. It seems like far too much work
14432 for issuing exit/enter error messages. If someone really misses the
14433 distinct error message... patches welcome. */
14435 #if 0
14436 /* Try to avoid confusing the user by producing and error message
14437 with correct "exit" or "enter" verbiage. We prefer "exit"
14438 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14439 if (branch_ctx == NULL)
14440 exit_p = false;
14441 else
14443 while (label_ctx)
14445 if (TREE_VALUE (label_ctx) == branch_ctx)
14447 exit_p = false;
14448 break;
14450 label_ctx = TREE_CHAIN (label_ctx);
14454 if (exit_p)
14455 error ("invalid exit from %s structured block", kind);
14456 else
14457 error ("invalid entry to %s structured block", kind);
14458 #endif
14460 /* If it's obvious we have an invalid entry, be specific about the error. */
14461 if (branch_ctx == NULL)
14462 error ("invalid entry to %s structured block", kind);
14463 else
14465 /* Otherwise, be vague and lazy, but efficient. */
14466 error ("invalid branch to/from %s structured block", kind);
14469 gsi_replace (gsi_p, gimple_build_nop (), false);
14470 return true;
14473 /* Pass 1: Create a minimal tree of structured blocks, and record
14474 where each label is found. */
14476 static tree
14477 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14478 struct walk_stmt_info *wi)
14480 gimple *context = (gimple *) wi->info;
14481 gimple *inner_context;
14482 gimple *stmt = gsi_stmt (*gsi_p);
14484 *handled_ops_p = true;
14486 switch (gimple_code (stmt))
14488 WALK_SUBSTMTS;
14490 case GIMPLE_OMP_PARALLEL:
14491 case GIMPLE_OMP_TASK:
14492 case GIMPLE_OMP_SCOPE:
14493 case GIMPLE_OMP_SECTIONS:
14494 case GIMPLE_OMP_SINGLE:
14495 case GIMPLE_OMP_SECTION:
14496 case GIMPLE_OMP_MASTER:
14497 case GIMPLE_OMP_MASKED:
14498 case GIMPLE_OMP_ORDERED:
14499 case GIMPLE_OMP_SCAN:
14500 case GIMPLE_OMP_CRITICAL:
14501 case GIMPLE_OMP_TARGET:
14502 case GIMPLE_OMP_TEAMS:
14503 case GIMPLE_OMP_TASKGROUP:
14504 /* The minimal context here is just the current OMP construct. */
14505 inner_context = stmt;
14506 wi->info = inner_context;
14507 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14508 wi->info = context;
14509 break;
14511 case GIMPLE_OMP_FOR:
14512 inner_context = stmt;
14513 wi->info = inner_context;
14514 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14515 walk them. */
14516 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
14517 diagnose_sb_1, NULL, wi);
14518 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14519 wi->info = context;
14520 break;
14522 case GIMPLE_LABEL:
14523 splay_tree_insert (all_labels,
14524 (splay_tree_key) gimple_label_label (
14525 as_a <glabel *> (stmt)),
14526 (splay_tree_value) context);
14527 break;
14529 default:
14530 break;
14533 return NULL_TREE;
14536 /* Pass 2: Check each branch and see if its context differs from that of
14537 the destination label's context. */
14539 static tree
14540 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14541 struct walk_stmt_info *wi)
14543 gimple *context = (gimple *) wi->info;
14544 splay_tree_node n;
14545 gimple *stmt = gsi_stmt (*gsi_p);
14547 *handled_ops_p = true;
14549 switch (gimple_code (stmt))
14551 WALK_SUBSTMTS;
14553 case GIMPLE_OMP_PARALLEL:
14554 case GIMPLE_OMP_TASK:
14555 case GIMPLE_OMP_SCOPE:
14556 case GIMPLE_OMP_SECTIONS:
14557 case GIMPLE_OMP_SINGLE:
14558 case GIMPLE_OMP_SECTION:
14559 case GIMPLE_OMP_MASTER:
14560 case GIMPLE_OMP_MASKED:
14561 case GIMPLE_OMP_ORDERED:
14562 case GIMPLE_OMP_SCAN:
14563 case GIMPLE_OMP_CRITICAL:
14564 case GIMPLE_OMP_TARGET:
14565 case GIMPLE_OMP_TEAMS:
14566 case GIMPLE_OMP_TASKGROUP:
14567 wi->info = stmt;
14568 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14569 wi->info = context;
14570 break;
14572 case GIMPLE_OMP_FOR:
14573 wi->info = stmt;
14574 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14575 walk them. */
14576 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
14577 diagnose_sb_2, NULL, wi);
14578 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14579 wi->info = context;
14580 break;
14582 case GIMPLE_COND:
14584 gcond *cond_stmt = as_a <gcond *> (stmt);
14585 tree lab = gimple_cond_true_label (cond_stmt);
14586 if (lab)
14588 n = splay_tree_lookup (all_labels,
14589 (splay_tree_key) lab);
14590 diagnose_sb_0 (gsi_p, context,
14591 n ? (gimple *) n->value : NULL);
14593 lab = gimple_cond_false_label (cond_stmt);
14594 if (lab)
14596 n = splay_tree_lookup (all_labels,
14597 (splay_tree_key) lab);
14598 diagnose_sb_0 (gsi_p, context,
14599 n ? (gimple *) n->value : NULL);
14602 break;
14604 case GIMPLE_GOTO:
14606 tree lab = gimple_goto_dest (stmt);
14607 if (TREE_CODE (lab) != LABEL_DECL)
14608 break;
14610 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14611 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
14613 break;
14615 case GIMPLE_SWITCH:
14617 gswitch *switch_stmt = as_a <gswitch *> (stmt);
14618 unsigned int i;
14619 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
14621 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
14622 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14623 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
14624 break;
14627 break;
14629 case GIMPLE_RETURN:
14630 diagnose_sb_0 (gsi_p, context, NULL);
14631 break;
14633 default:
14634 break;
14637 return NULL_TREE;
14640 static unsigned int
14641 diagnose_omp_structured_block_errors (void)
14643 struct walk_stmt_info wi;
14644 gimple_seq body = gimple_body (current_function_decl);
14646 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
14648 memset (&wi, 0, sizeof (wi));
14649 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
14651 memset (&wi, 0, sizeof (wi));
14652 wi.want_locations = true;
14653 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
14655 gimple_set_body (current_function_decl, body);
14657 splay_tree_delete (all_labels);
14658 all_labels = NULL;
14660 return 0;
14663 namespace {
14665 const pass_data pass_data_diagnose_omp_blocks =
14667 GIMPLE_PASS, /* type */
14668 "*diagnose_omp_blocks", /* name */
14669 OPTGROUP_OMP, /* optinfo_flags */
14670 TV_NONE, /* tv_id */
14671 PROP_gimple_any, /* properties_required */
14672 0, /* properties_provided */
14673 0, /* properties_destroyed */
14674 0, /* todo_flags_start */
14675 0, /* todo_flags_finish */
14678 class pass_diagnose_omp_blocks : public gimple_opt_pass
14680 public:
14681 pass_diagnose_omp_blocks (gcc::context *ctxt)
14682 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
14685 /* opt_pass methods: */
14686 virtual bool gate (function *)
14688 return flag_openacc || flag_openmp || flag_openmp_simd;
14690 virtual unsigned int execute (function *)
14692 return diagnose_omp_structured_block_errors ();
14695 }; // class pass_diagnose_omp_blocks
14697 } // anon namespace
14699 gimple_opt_pass *
14700 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
14702 return new pass_diagnose_omp_blocks (ctxt);
14706 #include "gt-omp-low.h"