[Ada] Empty CUDA_Global procedures when compiling for host
[official-gcc.git] / gcc / omp-low.c
blobf7242dfbbca848486752c2700838447e2124b566
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2021 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "omp-offload.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
78 struct omp_context
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
84 copy_body_data cb;
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context *outer;
88 gimple *stmt;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
92 splay_tree field_map;
93 tree record_type;
94 tree sender_decl;
95 tree receiver_decl;
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map;
103 tree srecord_type;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
107 tree block_vars;
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
111 tree cancel_label;
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
114 otherwise. */
115 gimple *simt_stmt;
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec<tree> task_reductions;
122 /* A hash map from the reduction clauses to the registered array
123 elts. */
124 hash_map<tree, unsigned> *task_reduction_map;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map<tree, tree> *lastprivate_conditional_map;
130 /* And a hash map from the allocate variables to their corresponding
131 allocators. */
132 hash_map<tree, tree> *allocate_map;
134 /* A tree_list of the reduction clauses in this context. This is
135 only used for checking the consistency of OpenACC reduction
136 clauses in scan_omp_for and is not guaranteed to contain a valid
137 value outside of this function. */
138 tree local_reduction_clauses;
140 /* A tree_list of the reduction clauses in outer contexts. This is
141 only used for checking the consistency of OpenACC reduction
142 clauses in scan_omp_for and is not guaranteed to contain a valid
143 value outside of this function. */
144 tree outer_reduction_clauses;
146 /* Nesting depth of this context. Used to beautify error messages re
147 invalid gotos. The outermost ctx is depth 1, with depth 0 being
148 reserved for the main body of the function. */
149 int depth;
151 /* True if this parallel directive is nested within another. */
152 bool is_nested;
154 /* True if this construct can be cancelled. */
155 bool cancellable;
157 /* True if lower_omp_1 should look up lastprivate conditional in parent
158 context. */
159 bool combined_into_simd_safelen1;
161 /* True if there is nested scan context with inclusive clause. */
162 bool scan_inclusive;
164 /* True if there is nested scan context with exclusive clause. */
165 bool scan_exclusive;
167 /* True in the second simd loop of for simd with inscan reductions. */
168 bool for_simd_scan_phase;
170 /* True if there is order(concurrent) clause on the construct. */
171 bool order_concurrent;
173 /* True if there is bind clause on the construct (i.e. a loop construct). */
174 bool loop_p;
176 /* Only used for omp target contexts. True if a teams construct is
177 strictly nested in it. */
178 bool teams_nested_p;
180 /* Only used for omp target contexts. True if an OpenMP construct other
181 than teams is strictly nested in it. */
182 bool nonteams_nested_p;
184 /* Candidates for adjusting OpenACC privatization level. */
185 vec<tree> oacc_privatization_candidates;
188 static splay_tree all_contexts;
189 static int taskreg_nesting_level;
190 static int target_nesting_level;
191 static bitmap task_shared_vars;
192 static bitmap global_nonaddressable_vars;
193 static vec<omp_context *> taskreg_contexts;
195 static void scan_omp (gimple_seq *, omp_context *);
196 static tree scan_omp_1_op (tree *, int *, void *);
198 #define WALK_SUBSTMTS \
199 case GIMPLE_BIND: \
200 case GIMPLE_TRY: \
201 case GIMPLE_CATCH: \
202 case GIMPLE_EH_FILTER: \
203 case GIMPLE_TRANSACTION: \
204 /* The sub-statements for these should be walked. */ \
205 *handled_ops_p = false; \
206 break;
208 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
209 (This doesn't include OpenACC 'kernels' decomposed parts.) */
211 static bool
212 is_oacc_parallel_or_serial (omp_context *ctx)
214 enum gimple_code outer_type = gimple_code (ctx->stmt);
215 return ((outer_type == GIMPLE_OMP_TARGET)
216 && ((gimple_omp_target_kind (ctx->stmt)
217 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
218 || (gimple_omp_target_kind (ctx->stmt)
219 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
222 /* Return whether CTX represents an OpenACC 'kernels' construct.
223 (This doesn't include OpenACC 'kernels' decomposed parts.) */
225 static bool
226 is_oacc_kernels (omp_context *ctx)
228 enum gimple_code outer_type = gimple_code (ctx->stmt);
229 return ((outer_type == GIMPLE_OMP_TARGET)
230 && (gimple_omp_target_kind (ctx->stmt)
231 == GF_OMP_TARGET_KIND_OACC_KERNELS));
234 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
236 static bool
237 is_oacc_kernels_decomposed_part (omp_context *ctx)
239 enum gimple_code outer_type = gimple_code (ctx->stmt);
240 return ((outer_type == GIMPLE_OMP_TARGET)
241 && ((gimple_omp_target_kind (ctx->stmt)
242 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
243 || (gimple_omp_target_kind (ctx->stmt)
244 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
245 || (gimple_omp_target_kind (ctx->stmt)
246 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
249 /* Return true if STMT corresponds to an OpenMP target region. */
250 static bool
251 is_omp_target (gimple *stmt)
253 if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
255 int kind = gimple_omp_target_kind (stmt);
256 return (kind == GF_OMP_TARGET_KIND_REGION
257 || kind == GF_OMP_TARGET_KIND_DATA
258 || kind == GF_OMP_TARGET_KIND_ENTER_DATA
259 || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
261 return false;
264 /* If DECL is the artificial dummy VAR_DECL created for non-static
265 data member privatization, return the underlying "this" parameter,
266 otherwise return NULL. */
268 tree
269 omp_member_access_dummy_var (tree decl)
271 if (!VAR_P (decl)
272 || !DECL_ARTIFICIAL (decl)
273 || !DECL_IGNORED_P (decl)
274 || !DECL_HAS_VALUE_EXPR_P (decl)
275 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
276 return NULL_TREE;
278 tree v = DECL_VALUE_EXPR (decl);
279 if (TREE_CODE (v) != COMPONENT_REF)
280 return NULL_TREE;
282 while (1)
283 switch (TREE_CODE (v))
285 case COMPONENT_REF:
286 case MEM_REF:
287 case INDIRECT_REF:
288 CASE_CONVERT:
289 case POINTER_PLUS_EXPR:
290 v = TREE_OPERAND (v, 0);
291 continue;
292 case PARM_DECL:
293 if (DECL_CONTEXT (v) == current_function_decl
294 && DECL_ARTIFICIAL (v)
295 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
296 return v;
297 return NULL_TREE;
298 default:
299 return NULL_TREE;
303 /* Helper for unshare_and_remap, called through walk_tree. */
305 static tree
306 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
308 tree *pair = (tree *) data;
309 if (*tp == pair[0])
311 *tp = unshare_expr (pair[1]);
312 *walk_subtrees = 0;
314 else if (IS_TYPE_OR_DECL_P (*tp))
315 *walk_subtrees = 0;
316 return NULL_TREE;
319 /* Return unshare_expr (X) with all occurrences of FROM
320 replaced with TO. */
322 static tree
323 unshare_and_remap (tree x, tree from, tree to)
325 tree pair[2] = { from, to };
326 x = unshare_expr (x);
327 walk_tree (&x, unshare_and_remap_1, pair, NULL);
328 return x;
331 /* Convenience function for calling scan_omp_1_op on tree operands. */
333 static inline tree
334 scan_omp_op (tree *tp, omp_context *ctx)
336 struct walk_stmt_info wi;
338 memset (&wi, 0, sizeof (wi));
339 wi.info = ctx;
340 wi.want_locations = true;
342 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
345 static void lower_omp (gimple_seq *, omp_context *);
346 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
347 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
349 /* Return true if CTX is for an omp parallel. */
351 static inline bool
352 is_parallel_ctx (omp_context *ctx)
354 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
358 /* Return true if CTX is for an omp task. */
360 static inline bool
361 is_task_ctx (omp_context *ctx)
363 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
367 /* Return true if CTX is for an omp taskloop. */
369 static inline bool
370 is_taskloop_ctx (omp_context *ctx)
372 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
373 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
377 /* Return true if CTX is for a host omp teams. */
379 static inline bool
380 is_host_teams_ctx (omp_context *ctx)
382 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
383 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
386 /* Return true if CTX is for an omp parallel or omp task or host omp teams
387 (the last one is strictly not a task region in OpenMP speak, but we
388 need to treat it similarly). */
390 static inline bool
391 is_taskreg_ctx (omp_context *ctx)
393 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
396 /* Return true if EXPR is variable sized. */
398 static inline bool
399 is_variable_sized (const_tree expr)
401 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
404 /* Lookup variables. The "maybe" form
405 allows for the variable form to not have been entered, otherwise we
406 assert that the variable must have been entered. */
408 static inline tree
409 lookup_decl (tree var, omp_context *ctx)
411 tree *n = ctx->cb.decl_map->get (var);
412 return *n;
415 static inline tree
416 maybe_lookup_decl (const_tree var, omp_context *ctx)
418 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
419 return n ? *n : NULL_TREE;
422 static inline tree
423 lookup_field (tree var, omp_context *ctx)
425 splay_tree_node n;
426 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
427 return (tree) n->value;
430 static inline tree
431 lookup_sfield (splay_tree_key key, omp_context *ctx)
433 splay_tree_node n;
434 n = splay_tree_lookup (ctx->sfield_map
435 ? ctx->sfield_map : ctx->field_map, key);
436 return (tree) n->value;
439 static inline tree
440 lookup_sfield (tree var, omp_context *ctx)
442 return lookup_sfield ((splay_tree_key) var, ctx);
445 static inline tree
446 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
448 splay_tree_node n;
449 n = splay_tree_lookup (ctx->field_map, key);
450 return n ? (tree) n->value : NULL_TREE;
453 static inline tree
454 maybe_lookup_field (tree var, omp_context *ctx)
456 return maybe_lookup_field ((splay_tree_key) var, ctx);
459 /* Return true if DECL should be copied by pointer. SHARED_CTX is
460 the parallel context if DECL is to be shared. */
462 static bool
463 use_pointer_for_field (tree decl, omp_context *shared_ctx)
465 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
466 || TYPE_ATOMIC (TREE_TYPE (decl)))
467 return true;
469 /* We can only use copy-in/copy-out semantics for shared variables
470 when we know the value is not accessible from an outer scope. */
471 if (shared_ctx)
473 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
475 /* ??? Trivially accessible from anywhere. But why would we even
476 be passing an address in this case? Should we simply assert
477 this to be false, or should we have a cleanup pass that removes
478 these from the list of mappings? */
479 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
480 return true;
482 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
483 without analyzing the expression whether or not its location
484 is accessible to anyone else. In the case of nested parallel
485 regions it certainly may be. */
486 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
487 return true;
489 /* Do not use copy-in/copy-out for variables that have their
490 address taken. */
491 if (is_global_var (decl))
493 /* For file scope vars, track whether we've seen them as
494 non-addressable initially and in that case, keep the same
495 answer for the duration of the pass, even when they are made
496 addressable later on e.g. through reduction expansion. Global
497 variables which weren't addressable before the pass will not
498 have their privatized copies address taken. See PR91216. */
499 if (!TREE_ADDRESSABLE (decl))
501 if (!global_nonaddressable_vars)
502 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
503 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
505 else if (!global_nonaddressable_vars
506 || !bitmap_bit_p (global_nonaddressable_vars,
507 DECL_UID (decl)))
508 return true;
510 else if (TREE_ADDRESSABLE (decl))
511 return true;
513 /* lower_send_shared_vars only uses copy-in, but not copy-out
514 for these. */
515 if (TREE_READONLY (decl)
516 || ((TREE_CODE (decl) == RESULT_DECL
517 || TREE_CODE (decl) == PARM_DECL)
518 && DECL_BY_REFERENCE (decl)))
519 return false;
521 /* Disallow copy-in/out in nested parallel if
522 decl is shared in outer parallel, otherwise
523 each thread could store the shared variable
524 in its own copy-in location, making the
525 variable no longer really shared. */
526 if (shared_ctx->is_nested)
528 omp_context *up;
530 for (up = shared_ctx->outer; up; up = up->outer)
531 if ((is_taskreg_ctx (up)
532 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
533 && is_gimple_omp_offloaded (up->stmt)))
534 && maybe_lookup_decl (decl, up))
535 break;
537 if (up)
539 tree c;
541 if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
543 for (c = gimple_omp_target_clauses (up->stmt);
544 c; c = OMP_CLAUSE_CHAIN (c))
545 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
546 && OMP_CLAUSE_DECL (c) == decl)
547 break;
549 else
550 for (c = gimple_omp_taskreg_clauses (up->stmt);
551 c; c = OMP_CLAUSE_CHAIN (c))
552 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
553 && OMP_CLAUSE_DECL (c) == decl)
554 break;
556 if (c)
557 goto maybe_mark_addressable_and_ret;
561 /* For tasks avoid using copy-in/out. As tasks can be
562 deferred or executed in different thread, when GOMP_task
563 returns, the task hasn't necessarily terminated. */
564 if (is_task_ctx (shared_ctx))
566 tree outer;
567 maybe_mark_addressable_and_ret:
568 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
569 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
571 /* Taking address of OUTER in lower_send_shared_vars
572 might need regimplification of everything that uses the
573 variable. */
574 if (!task_shared_vars)
575 task_shared_vars = BITMAP_ALLOC (NULL);
576 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
577 TREE_ADDRESSABLE (outer) = 1;
579 return true;
583 return false;
586 /* Construct a new automatic decl similar to VAR. */
588 static tree
589 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
591 tree copy = copy_var_decl (var, name, type);
593 DECL_CONTEXT (copy) = current_function_decl;
594 DECL_CHAIN (copy) = ctx->block_vars;
595 /* If VAR is listed in task_shared_vars, it means it wasn't
596 originally addressable and is just because task needs to take
597 it's address. But we don't need to take address of privatizations
598 from that var. */
599 if (TREE_ADDRESSABLE (var)
600 && ((task_shared_vars
601 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
602 || (global_nonaddressable_vars
603 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
604 TREE_ADDRESSABLE (copy) = 0;
605 ctx->block_vars = copy;
607 return copy;
610 static tree
611 omp_copy_decl_1 (tree var, omp_context *ctx)
613 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
616 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
617 as appropriate. */
618 /* See also 'gcc/omp-oacc-neuter-broadcast.cc:oacc_build_component_ref'. */
620 static tree
621 omp_build_component_ref (tree obj, tree field)
623 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
624 if (TREE_THIS_VOLATILE (field))
625 TREE_THIS_VOLATILE (ret) |= 1;
626 if (TREE_READONLY (field))
627 TREE_READONLY (ret) |= 1;
628 return ret;
631 /* Build tree nodes to access the field for VAR on the receiver side. */
633 static tree
634 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
636 tree x, field = lookup_field (var, ctx);
638 /* If the receiver record type was remapped in the child function,
639 remap the field into the new record type. */
640 x = maybe_lookup_field (field, ctx);
641 if (x != NULL)
642 field = x;
644 x = build_simple_mem_ref (ctx->receiver_decl);
645 TREE_THIS_NOTRAP (x) = 1;
646 x = omp_build_component_ref (x, field);
647 if (by_ref)
649 x = build_simple_mem_ref (x);
650 TREE_THIS_NOTRAP (x) = 1;
653 return x;
656 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
657 of a parallel, this is a component reference; for workshare constructs
658 this is some variable. */
660 static tree
661 build_outer_var_ref (tree var, omp_context *ctx,
662 enum omp_clause_code code = OMP_CLAUSE_ERROR)
664 tree x;
665 omp_context *outer = ctx->outer;
666 for (; outer; outer = outer->outer)
668 if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
669 continue;
670 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
671 && !maybe_lookup_decl (var, outer))
672 continue;
673 break;
676 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
677 x = var;
678 else if (is_variable_sized (var))
680 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
681 x = build_outer_var_ref (x, ctx, code);
682 x = build_simple_mem_ref (x);
684 else if (is_taskreg_ctx (ctx))
686 bool by_ref = use_pointer_for_field (var, NULL);
687 x = build_receiver_ref (var, by_ref, ctx);
689 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
690 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
691 || ctx->loop_p
692 || (code == OMP_CLAUSE_PRIVATE
693 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
694 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
695 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
697 /* #pragma omp simd isn't a worksharing construct, and can reference
698 even private vars in its linear etc. clauses.
699 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
700 to private vars in all worksharing constructs. */
701 x = NULL_TREE;
702 if (outer && is_taskreg_ctx (outer))
703 x = lookup_decl (var, outer);
704 else if (outer)
705 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
706 if (x == NULL_TREE)
707 x = var;
709 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
711 gcc_assert (outer);
712 splay_tree_node n
713 = splay_tree_lookup (outer->field_map,
714 (splay_tree_key) &DECL_UID (var));
715 if (n == NULL)
717 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
718 x = var;
719 else
720 x = lookup_decl (var, outer);
722 else
724 tree field = (tree) n->value;
725 /* If the receiver record type was remapped in the child function,
726 remap the field into the new record type. */
727 x = maybe_lookup_field (field, outer);
728 if (x != NULL)
729 field = x;
731 x = build_simple_mem_ref (outer->receiver_decl);
732 x = omp_build_component_ref (x, field);
733 if (use_pointer_for_field (var, outer))
734 x = build_simple_mem_ref (x);
737 else if (outer)
738 x = lookup_decl (var, outer);
739 else if (omp_privatize_by_reference (var))
740 /* This can happen with orphaned constructs. If var is reference, it is
741 possible it is shared and as such valid. */
742 x = var;
743 else if (omp_member_access_dummy_var (var))
744 x = var;
745 else
746 gcc_unreachable ();
748 if (x == var)
750 tree t = omp_member_access_dummy_var (var);
751 if (t)
753 x = DECL_VALUE_EXPR (var);
754 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
755 if (o != t)
756 x = unshare_and_remap (x, t, o);
757 else
758 x = unshare_expr (x);
762 if (omp_privatize_by_reference (var))
763 x = build_simple_mem_ref (x);
765 return x;
768 /* Build tree nodes to access the field for VAR on the sender side. */
770 static tree
771 build_sender_ref (splay_tree_key key, omp_context *ctx)
773 tree field = lookup_sfield (key, ctx);
774 return omp_build_component_ref (ctx->sender_decl, field);
777 static tree
778 build_sender_ref (tree var, omp_context *ctx)
780 return build_sender_ref ((splay_tree_key) var, ctx);
783 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
784 BASE_POINTERS_RESTRICT, declare the field with restrict. */
786 static void
787 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
789 tree field, type, sfield = NULL_TREE;
790 splay_tree_key key = (splay_tree_key) var;
792 if ((mask & 16) != 0)
794 key = (splay_tree_key) &DECL_NAME (var);
795 gcc_checking_assert (key != (splay_tree_key) var);
797 if ((mask & 8) != 0)
799 key = (splay_tree_key) &DECL_UID (var);
800 gcc_checking_assert (key != (splay_tree_key) var);
802 gcc_assert ((mask & 1) == 0
803 || !splay_tree_lookup (ctx->field_map, key));
804 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
805 || !splay_tree_lookup (ctx->sfield_map, key));
806 gcc_assert ((mask & 3) == 3
807 || !is_gimple_omp_oacc (ctx->stmt));
809 type = TREE_TYPE (var);
810 if ((mask & 16) != 0)
811 type = lang_hooks.decls.omp_array_data (var, true);
813 /* Prevent redeclaring the var in the split-off function with a restrict
814 pointer type. Note that we only clear type itself, restrict qualifiers in
815 the pointed-to type will be ignored by points-to analysis. */
816 if (POINTER_TYPE_P (type)
817 && TYPE_RESTRICT (type))
818 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
820 if (mask & 4)
822 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
823 type = build_pointer_type (build_pointer_type (type));
825 else if (by_ref)
826 type = build_pointer_type (type);
827 else if ((mask & (32 | 3)) == 1
828 && omp_privatize_by_reference (var))
829 type = TREE_TYPE (type);
831 field = build_decl (DECL_SOURCE_LOCATION (var),
832 FIELD_DECL, DECL_NAME (var), type);
834 /* Remember what variable this field was created for. This does have a
835 side effect of making dwarf2out ignore this member, so for helpful
836 debugging we clear it later in delete_omp_context. */
837 DECL_ABSTRACT_ORIGIN (field) = var;
838 if ((mask & 16) == 0 && type == TREE_TYPE (var))
840 SET_DECL_ALIGN (field, DECL_ALIGN (var));
841 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
842 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
844 else
845 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
847 if ((mask & 3) == 3)
849 insert_field_into_struct (ctx->record_type, field);
850 if (ctx->srecord_type)
852 sfield = build_decl (DECL_SOURCE_LOCATION (var),
853 FIELD_DECL, DECL_NAME (var), type);
854 DECL_ABSTRACT_ORIGIN (sfield) = var;
855 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
856 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
857 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
858 insert_field_into_struct (ctx->srecord_type, sfield);
861 else
863 if (ctx->srecord_type == NULL_TREE)
865 tree t;
867 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
868 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
869 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
871 sfield = build_decl (DECL_SOURCE_LOCATION (t),
872 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
873 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
874 insert_field_into_struct (ctx->srecord_type, sfield);
875 splay_tree_insert (ctx->sfield_map,
876 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
877 (splay_tree_value) sfield);
880 sfield = field;
881 insert_field_into_struct ((mask & 1) ? ctx->record_type
882 : ctx->srecord_type, field);
885 if (mask & 1)
886 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
887 if ((mask & 2) && ctx->sfield_map)
888 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
891 static tree
892 install_var_local (tree var, omp_context *ctx)
894 tree new_var = omp_copy_decl_1 (var, ctx);
895 insert_decl_map (&ctx->cb, var, new_var);
896 return new_var;
899 /* Adjust the replacement for DECL in CTX for the new context. This means
900 copying the DECL_VALUE_EXPR, and fixing up the type. */
902 static void
903 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
905 tree new_decl, size;
907 new_decl = lookup_decl (decl, ctx);
909 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
911 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
912 && DECL_HAS_VALUE_EXPR_P (decl))
914 tree ve = DECL_VALUE_EXPR (decl);
915 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
916 SET_DECL_VALUE_EXPR (new_decl, ve);
917 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
920 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
922 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
923 if (size == error_mark_node)
924 size = TYPE_SIZE (TREE_TYPE (new_decl));
925 DECL_SIZE (new_decl) = size;
927 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
928 if (size == error_mark_node)
929 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
930 DECL_SIZE_UNIT (new_decl) = size;
934 /* The callback for remap_decl. Search all containing contexts for a
935 mapping of the variable; this avoids having to duplicate the splay
936 tree ahead of time. We know a mapping doesn't already exist in the
937 given context. Create new mappings to implement default semantics. */
939 static tree
940 omp_copy_decl (tree var, copy_body_data *cb)
942 omp_context *ctx = (omp_context *) cb;
943 tree new_var;
945 if (TREE_CODE (var) == LABEL_DECL)
947 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
948 return var;
949 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
950 DECL_CONTEXT (new_var) = current_function_decl;
951 insert_decl_map (&ctx->cb, var, new_var);
952 return new_var;
955 while (!is_taskreg_ctx (ctx))
957 ctx = ctx->outer;
958 if (ctx == NULL)
959 return var;
960 new_var = maybe_lookup_decl (var, ctx);
961 if (new_var)
962 return new_var;
965 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
966 return var;
968 return error_mark_node;
971 /* Create a new context, with OUTER_CTX being the surrounding context. */
973 static omp_context *
974 new_omp_context (gimple *stmt, omp_context *outer_ctx)
976 omp_context *ctx = XCNEW (omp_context);
978 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
979 (splay_tree_value) ctx);
980 ctx->stmt = stmt;
982 if (outer_ctx)
984 ctx->outer = outer_ctx;
985 ctx->cb = outer_ctx->cb;
986 ctx->cb.block = NULL;
987 ctx->depth = outer_ctx->depth + 1;
989 else
991 ctx->cb.src_fn = current_function_decl;
992 ctx->cb.dst_fn = current_function_decl;
993 ctx->cb.src_node = cgraph_node::get (current_function_decl);
994 gcc_checking_assert (ctx->cb.src_node);
995 ctx->cb.dst_node = ctx->cb.src_node;
996 ctx->cb.src_cfun = cfun;
997 ctx->cb.copy_decl = omp_copy_decl;
998 ctx->cb.eh_lp_nr = 0;
999 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
1000 ctx->cb.adjust_array_error_bounds = true;
1001 ctx->cb.dont_remap_vla_if_no_change = true;
1002 ctx->depth = 1;
1005 ctx->cb.decl_map = new hash_map<tree, tree>;
1007 return ctx;
1010 static gimple_seq maybe_catch_exception (gimple_seq);
1012 /* Finalize task copyfn. */
1014 static void
1015 finalize_task_copyfn (gomp_task *task_stmt)
1017 struct function *child_cfun;
1018 tree child_fn;
1019 gimple_seq seq = NULL, new_seq;
1020 gbind *bind;
1022 child_fn = gimple_omp_task_copy_fn (task_stmt);
1023 if (child_fn == NULL_TREE)
1024 return;
1026 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
1027 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
1029 push_cfun (child_cfun);
1030 bind = gimplify_body (child_fn, false);
1031 gimple_seq_add_stmt (&seq, bind);
1032 new_seq = maybe_catch_exception (seq);
1033 if (new_seq != seq)
1035 bind = gimple_build_bind (NULL, new_seq, NULL);
1036 seq = NULL;
1037 gimple_seq_add_stmt (&seq, bind);
1039 gimple_set_body (child_fn, seq);
1040 pop_cfun ();
1042 /* Inform the callgraph about the new function. */
1043 cgraph_node *node = cgraph_node::get_create (child_fn);
1044 node->parallelized_function = 1;
1045 cgraph_node::add_new_function (child_fn, false);
1048 /* Destroy a omp_context data structures. Called through the splay tree
1049 value delete callback. */
1051 static void
1052 delete_omp_context (splay_tree_value value)
1054 omp_context *ctx = (omp_context *) value;
1056 delete ctx->cb.decl_map;
1058 if (ctx->field_map)
1059 splay_tree_delete (ctx->field_map);
1060 if (ctx->sfield_map)
1061 splay_tree_delete (ctx->sfield_map);
1063 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1064 it produces corrupt debug information. */
1065 if (ctx->record_type)
1067 tree t;
1068 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1069 DECL_ABSTRACT_ORIGIN (t) = NULL;
1071 if (ctx->srecord_type)
1073 tree t;
1074 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1075 DECL_ABSTRACT_ORIGIN (t) = NULL;
1078 if (is_task_ctx (ctx))
1079 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
1081 if (ctx->task_reduction_map)
1083 ctx->task_reductions.release ();
1084 delete ctx->task_reduction_map;
1087 delete ctx->lastprivate_conditional_map;
1088 delete ctx->allocate_map;
1090 XDELETE (ctx);
1093 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1094 context. */
1096 static void
1097 fixup_child_record_type (omp_context *ctx)
1099 tree f, type = ctx->record_type;
1101 if (!ctx->receiver_decl)
1102 return;
1103 /* ??? It isn't sufficient to just call remap_type here, because
1104 variably_modified_type_p doesn't work the way we expect for
1105 record types. Testing each field for whether it needs remapping
1106 and creating a new record by hand works, however. */
1107 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1108 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1109 break;
1110 if (f)
1112 tree name, new_fields = NULL;
1114 type = lang_hooks.types.make_type (RECORD_TYPE);
1115 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1116 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1117 TYPE_DECL, name, type);
1118 TYPE_NAME (type) = name;
1120 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1122 tree new_f = copy_node (f);
1123 DECL_CONTEXT (new_f) = type;
1124 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1125 DECL_CHAIN (new_f) = new_fields;
1126 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1127 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1128 &ctx->cb, NULL);
1129 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1130 &ctx->cb, NULL);
1131 new_fields = new_f;
1133 /* Arrange to be able to look up the receiver field
1134 given the sender field. */
1135 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1136 (splay_tree_value) new_f);
1138 TYPE_FIELDS (type) = nreverse (new_fields);
1139 layout_type (type);
1142 /* In a target region we never modify any of the pointers in *.omp_data_i,
1143 so attempt to help the optimizers. */
1144 if (is_gimple_omp_offloaded (ctx->stmt))
1145 type = build_qualified_type (type, TYPE_QUAL_CONST);
1147 TREE_TYPE (ctx->receiver_decl)
1148 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1151 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1152 specified by CLAUSES. */
1154 static void
1155 scan_sharing_clauses (tree clauses, omp_context *ctx)
1157 tree c, decl;
1158 bool scan_array_reductions = false;
1160 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1161 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
1162 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1163 /* omp_default_mem_alloc is 1 */
1164 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1165 || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
1167 if (ctx->allocate_map == NULL)
1168 ctx->allocate_map = new hash_map<tree, tree>;
1169 tree val = integer_zero_node;
1170 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1171 val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
1172 if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
1173 val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
1174 ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
1177 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1179 bool by_ref;
1181 switch (OMP_CLAUSE_CODE (c))
1183 case OMP_CLAUSE_PRIVATE:
1184 decl = OMP_CLAUSE_DECL (c);
1185 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1186 goto do_private;
1187 else if (!is_variable_sized (decl))
1188 install_var_local (decl, ctx);
1189 break;
1191 case OMP_CLAUSE_SHARED:
1192 decl = OMP_CLAUSE_DECL (c);
1193 if (ctx->allocate_map && ctx->allocate_map->get (decl))
1194 ctx->allocate_map->remove (decl);
1195 /* Ignore shared directives in teams construct inside of
1196 target construct. */
1197 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1198 && !is_host_teams_ctx (ctx))
1200 /* Global variables don't need to be copied,
1201 the receiver side will use them directly. */
1202 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1203 if (is_global_var (odecl))
1204 break;
1205 insert_decl_map (&ctx->cb, decl, odecl);
1206 break;
1208 gcc_assert (is_taskreg_ctx (ctx));
1209 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1210 || !is_variable_sized (decl));
1211 /* Global variables don't need to be copied,
1212 the receiver side will use them directly. */
1213 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1214 break;
1215 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1217 use_pointer_for_field (decl, ctx);
1218 break;
1220 by_ref = use_pointer_for_field (decl, NULL);
1221 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1222 || TREE_ADDRESSABLE (decl)
1223 || by_ref
1224 || omp_privatize_by_reference (decl))
1226 by_ref = use_pointer_for_field (decl, ctx);
1227 install_var_field (decl, by_ref, 3, ctx);
1228 install_var_local (decl, ctx);
1229 break;
1231 /* We don't need to copy const scalar vars back. */
1232 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1233 goto do_private;
1235 case OMP_CLAUSE_REDUCTION:
1236 /* Collect 'reduction' clauses on OpenACC compute construct. */
1237 if (is_gimple_omp_oacc (ctx->stmt)
1238 && is_gimple_omp_offloaded (ctx->stmt))
1240 /* No 'reduction' clauses on OpenACC 'kernels'. */
1241 gcc_checking_assert (!is_oacc_kernels (ctx));
1242 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1243 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
1245 ctx->local_reduction_clauses
1246 = tree_cons (NULL, c, ctx->local_reduction_clauses);
1248 /* FALLTHRU */
1250 case OMP_CLAUSE_IN_REDUCTION:
1251 decl = OMP_CLAUSE_DECL (c);
1252 if (ctx->allocate_map
1253 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1254 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
1255 || OMP_CLAUSE_REDUCTION_TASK (c)))
1256 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1257 || is_task_ctx (ctx)))
1259 /* For now. */
1260 if (ctx->allocate_map->get (decl))
1261 ctx->allocate_map->remove (decl);
1263 if (TREE_CODE (decl) == MEM_REF)
1265 tree t = TREE_OPERAND (decl, 0);
1266 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1267 t = TREE_OPERAND (t, 0);
1268 if (TREE_CODE (t) == INDIRECT_REF
1269 || TREE_CODE (t) == ADDR_EXPR)
1270 t = TREE_OPERAND (t, 0);
1271 if (is_omp_target (ctx->stmt))
1273 if (is_variable_sized (t))
1275 gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
1276 t = DECL_VALUE_EXPR (t);
1277 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
1278 t = TREE_OPERAND (t, 0);
1279 gcc_assert (DECL_P (t));
1281 tree at = t;
1282 if (ctx->outer)
1283 scan_omp_op (&at, ctx->outer);
1284 tree nt = omp_copy_decl_1 (at, ctx);
1285 splay_tree_insert (ctx->field_map,
1286 (splay_tree_key) &DECL_CONTEXT (t),
1287 (splay_tree_value) nt);
1288 if (at != t)
1289 splay_tree_insert (ctx->field_map,
1290 (splay_tree_key) &DECL_CONTEXT (at),
1291 (splay_tree_value) nt);
1292 break;
1294 install_var_local (t, ctx);
1295 if (is_taskreg_ctx (ctx)
1296 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1297 || (is_task_ctx (ctx)
1298 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1299 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1300 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1301 == POINTER_TYPE)))))
1302 && !is_variable_sized (t)
1303 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1304 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1305 && !is_task_ctx (ctx))))
1307 by_ref = use_pointer_for_field (t, NULL);
1308 if (is_task_ctx (ctx)
1309 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1310 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1312 install_var_field (t, false, 1, ctx);
1313 install_var_field (t, by_ref, 2, ctx);
1315 else
1316 install_var_field (t, by_ref, 3, ctx);
1318 break;
1320 if (is_omp_target (ctx->stmt))
1322 tree at = decl;
1323 if (ctx->outer)
1324 scan_omp_op (&at, ctx->outer);
1325 tree nt = omp_copy_decl_1 (at, ctx);
1326 splay_tree_insert (ctx->field_map,
1327 (splay_tree_key) &DECL_CONTEXT (decl),
1328 (splay_tree_value) nt);
1329 if (at != decl)
1330 splay_tree_insert (ctx->field_map,
1331 (splay_tree_key) &DECL_CONTEXT (at),
1332 (splay_tree_value) nt);
1333 break;
1335 if (is_task_ctx (ctx)
1336 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1337 && OMP_CLAUSE_REDUCTION_TASK (c)
1338 && is_parallel_ctx (ctx)))
1340 /* Global variables don't need to be copied,
1341 the receiver side will use them directly. */
1342 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1344 by_ref = use_pointer_for_field (decl, ctx);
1345 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1346 install_var_field (decl, by_ref, 3, ctx);
1348 install_var_local (decl, ctx);
1349 break;
1351 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1352 && OMP_CLAUSE_REDUCTION_TASK (c))
1354 install_var_local (decl, ctx);
1355 break;
1357 goto do_private;
1359 case OMP_CLAUSE_LASTPRIVATE:
1360 /* Let the corresponding firstprivate clause create
1361 the variable. */
1362 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1363 break;
1364 /* FALLTHRU */
1366 case OMP_CLAUSE_FIRSTPRIVATE:
1367 case OMP_CLAUSE_LINEAR:
1368 decl = OMP_CLAUSE_DECL (c);
1369 do_private:
1370 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1371 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1372 && is_gimple_omp_offloaded (ctx->stmt))
1374 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1376 by_ref = !omp_privatize_by_reference (decl);
1377 install_var_field (decl, by_ref, 3, ctx);
1379 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1380 install_var_field (decl, true, 3, ctx);
1381 else
1382 install_var_field (decl, false, 3, ctx);
1384 if (is_variable_sized (decl))
1386 if (is_task_ctx (ctx))
1388 if (ctx->allocate_map
1389 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1391 /* For now. */
1392 if (ctx->allocate_map->get (decl))
1393 ctx->allocate_map->remove (decl);
1395 install_var_field (decl, false, 1, ctx);
1397 break;
1399 else if (is_taskreg_ctx (ctx))
1401 bool global
1402 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1403 by_ref = use_pointer_for_field (decl, NULL);
1405 if (is_task_ctx (ctx)
1406 && (global || by_ref || omp_privatize_by_reference (decl)))
1408 if (ctx->allocate_map
1409 && ctx->allocate_map->get (decl))
1410 install_var_field (decl, by_ref, 32 | 1, ctx);
1411 else
1412 install_var_field (decl, false, 1, ctx);
1413 if (!global)
1414 install_var_field (decl, by_ref, 2, ctx);
1416 else if (!global)
1417 install_var_field (decl, by_ref, 3, ctx);
1419 install_var_local (decl, ctx);
1420 break;
1422 case OMP_CLAUSE_USE_DEVICE_PTR:
1423 case OMP_CLAUSE_USE_DEVICE_ADDR:
1424 decl = OMP_CLAUSE_DECL (c);
1426 /* Fortran array descriptors. */
1427 if (lang_hooks.decls.omp_array_data (decl, true))
1428 install_var_field (decl, false, 19, ctx);
1429 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1430 && !omp_privatize_by_reference (decl)
1431 && !omp_is_allocatable_or_ptr (decl))
1432 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1433 install_var_field (decl, true, 11, ctx);
1434 else
1435 install_var_field (decl, false, 11, ctx);
1436 if (DECL_SIZE (decl)
1437 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1439 tree decl2 = DECL_VALUE_EXPR (decl);
1440 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1441 decl2 = TREE_OPERAND (decl2, 0);
1442 gcc_assert (DECL_P (decl2));
1443 install_var_local (decl2, ctx);
1445 install_var_local (decl, ctx);
1446 break;
1448 case OMP_CLAUSE_IS_DEVICE_PTR:
1449 decl = OMP_CLAUSE_DECL (c);
1450 goto do_private;
1452 case OMP_CLAUSE__LOOPTEMP_:
1453 case OMP_CLAUSE__REDUCTEMP_:
1454 gcc_assert (is_taskreg_ctx (ctx));
1455 decl = OMP_CLAUSE_DECL (c);
1456 install_var_field (decl, false, 3, ctx);
1457 install_var_local (decl, ctx);
1458 break;
1460 case OMP_CLAUSE_COPYPRIVATE:
1461 case OMP_CLAUSE_COPYIN:
1462 decl = OMP_CLAUSE_DECL (c);
1463 by_ref = use_pointer_for_field (decl, NULL);
1464 install_var_field (decl, by_ref, 3, ctx);
1465 break;
1467 case OMP_CLAUSE_FINAL:
1468 case OMP_CLAUSE_IF:
1469 case OMP_CLAUSE_NUM_THREADS:
1470 case OMP_CLAUSE_NUM_TEAMS:
1471 case OMP_CLAUSE_THREAD_LIMIT:
1472 case OMP_CLAUSE_DEVICE:
1473 case OMP_CLAUSE_SCHEDULE:
1474 case OMP_CLAUSE_DIST_SCHEDULE:
1475 case OMP_CLAUSE_DEPEND:
1476 case OMP_CLAUSE_PRIORITY:
1477 case OMP_CLAUSE_GRAINSIZE:
1478 case OMP_CLAUSE_NUM_TASKS:
1479 case OMP_CLAUSE_NUM_GANGS:
1480 case OMP_CLAUSE_NUM_WORKERS:
1481 case OMP_CLAUSE_VECTOR_LENGTH:
1482 case OMP_CLAUSE_DETACH:
1483 case OMP_CLAUSE_FILTER:
1484 if (ctx->outer)
1485 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1486 break;
1488 case OMP_CLAUSE_TO:
1489 case OMP_CLAUSE_FROM:
1490 case OMP_CLAUSE_MAP:
1491 if (ctx->outer)
1492 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1493 decl = OMP_CLAUSE_DECL (c);
1494 /* Global variables with "omp declare target" attribute
1495 don't need to be copied, the receiver side will use them
1496 directly. However, global variables with "omp declare target link"
1497 attribute need to be copied. Or when ALWAYS modifier is used. */
1498 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1499 && DECL_P (decl)
1500 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1501 && (OMP_CLAUSE_MAP_KIND (c)
1502 != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
1503 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
1504 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
1505 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1506 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1507 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1508 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1509 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
1510 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1511 && varpool_node::get_create (decl)->offloadable
1512 && !lookup_attribute ("omp declare target link",
1513 DECL_ATTRIBUTES (decl)))
1514 break;
1515 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1516 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1518 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1519 not offloaded; there is nothing to map for those. */
1520 if (!is_gimple_omp_offloaded (ctx->stmt)
1521 && !POINTER_TYPE_P (TREE_TYPE (decl))
1522 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1523 break;
1525 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1526 && DECL_P (decl)
1527 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1528 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1529 && is_omp_target (ctx->stmt))
1531 /* If this is an offloaded region, an attach operation should
1532 only exist when the pointer variable is mapped in a prior
1533 clause. */
1534 if (is_gimple_omp_offloaded (ctx->stmt))
1535 gcc_assert
1536 (maybe_lookup_decl (decl, ctx)
1537 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1538 && lookup_attribute ("omp declare target",
1539 DECL_ATTRIBUTES (decl))));
1541 /* By itself, attach/detach is generated as part of pointer
1542 variable mapping and should not create new variables in the
1543 offloaded region, however sender refs for it must be created
1544 for its address to be passed to the runtime. */
1545 tree field
1546 = build_decl (OMP_CLAUSE_LOCATION (c),
1547 FIELD_DECL, NULL_TREE, ptr_type_node);
1548 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1549 insert_field_into_struct (ctx->record_type, field);
1550 /* To not clash with a map of the pointer variable itself,
1551 attach/detach maps have their field looked up by the *clause*
1552 tree expression, not the decl. */
1553 gcc_assert (!splay_tree_lookup (ctx->field_map,
1554 (splay_tree_key) c));
1555 splay_tree_insert (ctx->field_map, (splay_tree_key) c,
1556 (splay_tree_value) field);
1557 break;
1559 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1560 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1561 || (OMP_CLAUSE_MAP_KIND (c)
1562 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1564 if (TREE_CODE (decl) == COMPONENT_REF
1565 || (TREE_CODE (decl) == INDIRECT_REF
1566 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1567 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1568 == REFERENCE_TYPE)))
1569 break;
1570 if (DECL_SIZE (decl)
1571 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1573 tree decl2 = DECL_VALUE_EXPR (decl);
1574 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1575 decl2 = TREE_OPERAND (decl2, 0);
1576 gcc_assert (DECL_P (decl2));
1577 install_var_local (decl2, ctx);
1579 install_var_local (decl, ctx);
1580 break;
1582 if (DECL_P (decl))
1584 if (DECL_SIZE (decl)
1585 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1587 tree decl2 = DECL_VALUE_EXPR (decl);
1588 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1589 decl2 = TREE_OPERAND (decl2, 0);
1590 gcc_assert (DECL_P (decl2));
1591 install_var_field (decl2, true, 3, ctx);
1592 install_var_local (decl2, ctx);
1593 install_var_local (decl, ctx);
1595 else
1597 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1598 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1599 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1600 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1601 install_var_field (decl, true, 7, ctx);
1602 else
1603 install_var_field (decl, true, 3, ctx);
1604 if (is_gimple_omp_offloaded (ctx->stmt)
1605 && !(is_gimple_omp_oacc (ctx->stmt)
1606 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
1607 install_var_local (decl, ctx);
1610 else
1612 tree base = get_base_address (decl);
1613 tree nc = OMP_CLAUSE_CHAIN (c);
1614 if (DECL_P (base)
1615 && nc != NULL_TREE
1616 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1617 && OMP_CLAUSE_DECL (nc) == base
1618 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1619 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1621 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1622 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1624 else
1626 if (ctx->outer)
1628 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1629 decl = OMP_CLAUSE_DECL (c);
1631 gcc_assert (!splay_tree_lookup (ctx->field_map,
1632 (splay_tree_key) decl));
1633 tree field
1634 = build_decl (OMP_CLAUSE_LOCATION (c),
1635 FIELD_DECL, NULL_TREE, ptr_type_node);
1636 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1637 insert_field_into_struct (ctx->record_type, field);
1638 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1639 (splay_tree_value) field);
1642 break;
1644 case OMP_CLAUSE_ORDER:
1645 ctx->order_concurrent = true;
1646 break;
1648 case OMP_CLAUSE_BIND:
1649 ctx->loop_p = true;
1650 break;
1652 case OMP_CLAUSE_NOWAIT:
1653 case OMP_CLAUSE_ORDERED:
1654 case OMP_CLAUSE_COLLAPSE:
1655 case OMP_CLAUSE_UNTIED:
1656 case OMP_CLAUSE_MERGEABLE:
1657 case OMP_CLAUSE_PROC_BIND:
1658 case OMP_CLAUSE_SAFELEN:
1659 case OMP_CLAUSE_SIMDLEN:
1660 case OMP_CLAUSE_THREADS:
1661 case OMP_CLAUSE_SIMD:
1662 case OMP_CLAUSE_NOGROUP:
1663 case OMP_CLAUSE_DEFAULTMAP:
1664 case OMP_CLAUSE_ASYNC:
1665 case OMP_CLAUSE_WAIT:
1666 case OMP_CLAUSE_GANG:
1667 case OMP_CLAUSE_WORKER:
1668 case OMP_CLAUSE_VECTOR:
1669 case OMP_CLAUSE_INDEPENDENT:
1670 case OMP_CLAUSE_AUTO:
1671 case OMP_CLAUSE_SEQ:
1672 case OMP_CLAUSE_TILE:
1673 case OMP_CLAUSE__SIMT_:
1674 case OMP_CLAUSE_DEFAULT:
1675 case OMP_CLAUSE_NONTEMPORAL:
1676 case OMP_CLAUSE_IF_PRESENT:
1677 case OMP_CLAUSE_FINALIZE:
1678 case OMP_CLAUSE_TASK_REDUCTION:
1679 case OMP_CLAUSE_ALLOCATE:
1680 break;
1682 case OMP_CLAUSE_ALIGNED:
1683 decl = OMP_CLAUSE_DECL (c);
1684 if (is_global_var (decl)
1685 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1686 install_var_local (decl, ctx);
1687 break;
1689 case OMP_CLAUSE__CONDTEMP_:
1690 decl = OMP_CLAUSE_DECL (c);
1691 if (is_parallel_ctx (ctx))
1693 install_var_field (decl, false, 3, ctx);
1694 install_var_local (decl, ctx);
1696 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1697 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1698 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1699 install_var_local (decl, ctx);
1700 break;
1702 case OMP_CLAUSE__CACHE_:
1703 case OMP_CLAUSE_NOHOST:
1704 default:
1705 gcc_unreachable ();
1709 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1711 switch (OMP_CLAUSE_CODE (c))
1713 case OMP_CLAUSE_LASTPRIVATE:
1714 /* Let the corresponding firstprivate clause create
1715 the variable. */
1716 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1717 scan_array_reductions = true;
1718 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1719 break;
1720 /* FALLTHRU */
1722 case OMP_CLAUSE_FIRSTPRIVATE:
1723 case OMP_CLAUSE_PRIVATE:
1724 case OMP_CLAUSE_LINEAR:
1725 case OMP_CLAUSE_IS_DEVICE_PTR:
1726 decl = OMP_CLAUSE_DECL (c);
1727 if (is_variable_sized (decl))
1729 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1730 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1731 && is_gimple_omp_offloaded (ctx->stmt))
1733 tree decl2 = DECL_VALUE_EXPR (decl);
1734 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1735 decl2 = TREE_OPERAND (decl2, 0);
1736 gcc_assert (DECL_P (decl2));
1737 install_var_local (decl2, ctx);
1738 fixup_remapped_decl (decl2, ctx, false);
1740 install_var_local (decl, ctx);
1742 fixup_remapped_decl (decl, ctx,
1743 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1744 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1745 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1746 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1747 scan_array_reductions = true;
1748 break;
1750 case OMP_CLAUSE_REDUCTION:
1751 case OMP_CLAUSE_IN_REDUCTION:
1752 decl = OMP_CLAUSE_DECL (c);
1753 if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
1755 if (is_variable_sized (decl))
1756 install_var_local (decl, ctx);
1757 fixup_remapped_decl (decl, ctx, false);
1759 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1760 scan_array_reductions = true;
1761 break;
1763 case OMP_CLAUSE_TASK_REDUCTION:
1764 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1765 scan_array_reductions = true;
1766 break;
1768 case OMP_CLAUSE_SHARED:
1769 /* Ignore shared directives in teams construct inside of
1770 target construct. */
1771 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1772 && !is_host_teams_ctx (ctx))
1773 break;
1774 decl = OMP_CLAUSE_DECL (c);
1775 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1776 break;
1777 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1779 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1780 ctx->outer)))
1781 break;
1782 bool by_ref = use_pointer_for_field (decl, ctx);
1783 install_var_field (decl, by_ref, 11, ctx);
1784 break;
1786 fixup_remapped_decl (decl, ctx, false);
1787 break;
1789 case OMP_CLAUSE_MAP:
1790 if (!is_gimple_omp_offloaded (ctx->stmt))
1791 break;
1792 decl = OMP_CLAUSE_DECL (c);
1793 if (DECL_P (decl)
1794 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1795 && (OMP_CLAUSE_MAP_KIND (c)
1796 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1797 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1798 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1799 && varpool_node::get_create (decl)->offloadable)
1800 break;
1801 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1802 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1803 && is_omp_target (ctx->stmt)
1804 && !is_gimple_omp_offloaded (ctx->stmt))
1805 break;
1806 if (DECL_P (decl))
1808 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1809 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1810 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1811 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1813 tree new_decl = lookup_decl (decl, ctx);
1814 TREE_TYPE (new_decl)
1815 = remap_type (TREE_TYPE (decl), &ctx->cb);
1817 else if (DECL_SIZE (decl)
1818 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1820 tree decl2 = DECL_VALUE_EXPR (decl);
1821 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1822 decl2 = TREE_OPERAND (decl2, 0);
1823 gcc_assert (DECL_P (decl2));
1824 fixup_remapped_decl (decl2, ctx, false);
1825 fixup_remapped_decl (decl, ctx, true);
1827 else
1828 fixup_remapped_decl (decl, ctx, false);
1830 break;
1832 case OMP_CLAUSE_COPYPRIVATE:
1833 case OMP_CLAUSE_COPYIN:
1834 case OMP_CLAUSE_DEFAULT:
1835 case OMP_CLAUSE_IF:
1836 case OMP_CLAUSE_NUM_THREADS:
1837 case OMP_CLAUSE_NUM_TEAMS:
1838 case OMP_CLAUSE_THREAD_LIMIT:
1839 case OMP_CLAUSE_DEVICE:
1840 case OMP_CLAUSE_SCHEDULE:
1841 case OMP_CLAUSE_DIST_SCHEDULE:
1842 case OMP_CLAUSE_NOWAIT:
1843 case OMP_CLAUSE_ORDERED:
1844 case OMP_CLAUSE_COLLAPSE:
1845 case OMP_CLAUSE_UNTIED:
1846 case OMP_CLAUSE_FINAL:
1847 case OMP_CLAUSE_MERGEABLE:
1848 case OMP_CLAUSE_PROC_BIND:
1849 case OMP_CLAUSE_SAFELEN:
1850 case OMP_CLAUSE_SIMDLEN:
1851 case OMP_CLAUSE_ALIGNED:
1852 case OMP_CLAUSE_DEPEND:
1853 case OMP_CLAUSE_DETACH:
1854 case OMP_CLAUSE_ALLOCATE:
1855 case OMP_CLAUSE__LOOPTEMP_:
1856 case OMP_CLAUSE__REDUCTEMP_:
1857 case OMP_CLAUSE_TO:
1858 case OMP_CLAUSE_FROM:
1859 case OMP_CLAUSE_PRIORITY:
1860 case OMP_CLAUSE_GRAINSIZE:
1861 case OMP_CLAUSE_NUM_TASKS:
1862 case OMP_CLAUSE_THREADS:
1863 case OMP_CLAUSE_SIMD:
1864 case OMP_CLAUSE_NOGROUP:
1865 case OMP_CLAUSE_DEFAULTMAP:
1866 case OMP_CLAUSE_ORDER:
1867 case OMP_CLAUSE_BIND:
1868 case OMP_CLAUSE_USE_DEVICE_PTR:
1869 case OMP_CLAUSE_USE_DEVICE_ADDR:
1870 case OMP_CLAUSE_NONTEMPORAL:
1871 case OMP_CLAUSE_ASYNC:
1872 case OMP_CLAUSE_WAIT:
1873 case OMP_CLAUSE_NUM_GANGS:
1874 case OMP_CLAUSE_NUM_WORKERS:
1875 case OMP_CLAUSE_VECTOR_LENGTH:
1876 case OMP_CLAUSE_GANG:
1877 case OMP_CLAUSE_WORKER:
1878 case OMP_CLAUSE_VECTOR:
1879 case OMP_CLAUSE_INDEPENDENT:
1880 case OMP_CLAUSE_AUTO:
1881 case OMP_CLAUSE_SEQ:
1882 case OMP_CLAUSE_TILE:
1883 case OMP_CLAUSE__SIMT_:
1884 case OMP_CLAUSE_IF_PRESENT:
1885 case OMP_CLAUSE_FINALIZE:
1886 case OMP_CLAUSE_FILTER:
1887 case OMP_CLAUSE__CONDTEMP_:
1888 break;
1890 case OMP_CLAUSE__CACHE_:
1891 case OMP_CLAUSE_NOHOST:
1892 default:
1893 gcc_unreachable ();
1897 gcc_checking_assert (!scan_array_reductions
1898 || !is_gimple_omp_oacc (ctx->stmt));
1899 if (scan_array_reductions)
1901 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1902 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1903 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1904 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1905 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1907 omp_context *rctx = ctx;
1908 if (is_omp_target (ctx->stmt))
1909 rctx = ctx->outer;
1910 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
1911 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
1913 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1914 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1915 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1916 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1917 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1918 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1922 /* Create a new name for omp child function. Returns an identifier. */
1924 static tree
1925 create_omp_child_function_name (bool task_copy)
1927 return clone_function_name_numbered (current_function_decl,
1928 task_copy ? "_omp_cpyfn" : "_omp_fn");
1931 /* Return true if CTX may belong to offloaded code: either if current function
1932 is offloaded, or any enclosing context corresponds to a target region. */
1934 static bool
1935 omp_maybe_offloaded_ctx (omp_context *ctx)
1937 if (cgraph_node::get (current_function_decl)->offloadable)
1938 return true;
1939 for (; ctx; ctx = ctx->outer)
1940 if (is_gimple_omp_offloaded (ctx->stmt))
1941 return true;
1942 return false;
1945 /* Build a decl for the omp child function. It'll not contain a body
1946 yet, just the bare decl. */
1948 static void
1949 create_omp_child_function (omp_context *ctx, bool task_copy)
1951 tree decl, type, name, t;
1953 name = create_omp_child_function_name (task_copy);
1954 if (task_copy)
1955 type = build_function_type_list (void_type_node, ptr_type_node,
1956 ptr_type_node, NULL_TREE);
1957 else
1958 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1960 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1962 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1963 || !task_copy);
1964 if (!task_copy)
1965 ctx->cb.dst_fn = decl;
1966 else
1967 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1969 TREE_STATIC (decl) = 1;
1970 TREE_USED (decl) = 1;
1971 DECL_ARTIFICIAL (decl) = 1;
1972 DECL_IGNORED_P (decl) = 0;
1973 TREE_PUBLIC (decl) = 0;
1974 DECL_UNINLINABLE (decl) = 1;
1975 DECL_EXTERNAL (decl) = 0;
1976 DECL_CONTEXT (decl) = NULL_TREE;
1977 DECL_INITIAL (decl) = make_node (BLOCK);
1978 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1979 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1980 /* Remove omp declare simd attribute from the new attributes. */
1981 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1983 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1984 a = a2;
1985 a = TREE_CHAIN (a);
1986 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1987 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1988 *p = TREE_CHAIN (*p);
1989 else
1991 tree chain = TREE_CHAIN (*p);
1992 *p = copy_node (*p);
1993 p = &TREE_CHAIN (*p);
1994 *p = chain;
1997 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1998 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1999 DECL_FUNCTION_SPECIFIC_TARGET (decl)
2000 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
2001 DECL_FUNCTION_VERSIONED (decl)
2002 = DECL_FUNCTION_VERSIONED (current_function_decl);
2004 if (omp_maybe_offloaded_ctx (ctx))
2006 cgraph_node::get_create (decl)->offloadable = 1;
2007 if (ENABLE_OFFLOADING)
2008 g->have_offload = true;
2011 if (cgraph_node::get_create (decl)->offloadable)
2013 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
2014 ? "omp target entrypoint"
2015 : "omp declare target");
2016 if (lookup_attribute ("omp declare target",
2017 DECL_ATTRIBUTES (current_function_decl)))
2019 if (is_gimple_omp_offloaded (ctx->stmt))
2020 DECL_ATTRIBUTES (decl)
2021 = remove_attribute ("omp declare target",
2022 copy_list (DECL_ATTRIBUTES (decl)));
2023 else
2024 target_attr = NULL;
2026 if (target_attr)
2027 DECL_ATTRIBUTES (decl)
2028 = tree_cons (get_identifier (target_attr),
2029 NULL_TREE, DECL_ATTRIBUTES (decl));
2032 t = build_decl (DECL_SOURCE_LOCATION (decl),
2033 RESULT_DECL, NULL_TREE, void_type_node);
2034 DECL_ARTIFICIAL (t) = 1;
2035 DECL_IGNORED_P (t) = 1;
2036 DECL_CONTEXT (t) = decl;
2037 DECL_RESULT (decl) = t;
2039 tree data_name = get_identifier (".omp_data_i");
2040 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
2041 ptr_type_node);
2042 DECL_ARTIFICIAL (t) = 1;
2043 DECL_NAMELESS (t) = 1;
2044 DECL_ARG_TYPE (t) = ptr_type_node;
2045 DECL_CONTEXT (t) = current_function_decl;
2046 TREE_USED (t) = 1;
2047 TREE_READONLY (t) = 1;
2048 DECL_ARGUMENTS (decl) = t;
2049 if (!task_copy)
2050 ctx->receiver_decl = t;
2051 else
2053 t = build_decl (DECL_SOURCE_LOCATION (decl),
2054 PARM_DECL, get_identifier (".omp_data_o"),
2055 ptr_type_node);
2056 DECL_ARTIFICIAL (t) = 1;
2057 DECL_NAMELESS (t) = 1;
2058 DECL_ARG_TYPE (t) = ptr_type_node;
2059 DECL_CONTEXT (t) = current_function_decl;
2060 TREE_USED (t) = 1;
2061 TREE_ADDRESSABLE (t) = 1;
2062 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2063 DECL_ARGUMENTS (decl) = t;
2066 /* Allocate memory for the function structure. The call to
2067 allocate_struct_function clobbers CFUN, so we need to restore
2068 it afterward. */
2069 push_struct_function (decl);
2070 cfun->function_end_locus = gimple_location (ctx->stmt);
2071 init_tree_ssa (cfun);
2072 pop_cfun ();
2075 /* Callback for walk_gimple_seq. Check if combined parallel
2076 contains gimple_omp_for_combined_into_p OMP_FOR. */
2078 tree
2079 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2080 bool *handled_ops_p,
2081 struct walk_stmt_info *wi)
2083 gimple *stmt = gsi_stmt (*gsi_p);
2085 *handled_ops_p = true;
2086 switch (gimple_code (stmt))
2088 WALK_SUBSTMTS;
2090 case GIMPLE_OMP_FOR:
2091 if (gimple_omp_for_combined_into_p (stmt)
2092 && gimple_omp_for_kind (stmt)
2093 == *(const enum gf_mask *) (wi->info))
2095 wi->info = stmt;
2096 return integer_zero_node;
2098 break;
2099 default:
2100 break;
2102 return NULL;
2105 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2107 static void
2108 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2109 omp_context *outer_ctx)
2111 struct walk_stmt_info wi;
2113 memset (&wi, 0, sizeof (wi));
2114 wi.val_only = true;
2115 wi.info = (void *) &msk;
2116 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2117 if (wi.info != (void *) &msk)
2119 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2120 struct omp_for_data fd;
2121 omp_extract_for_data (for_stmt, &fd, NULL);
2122 /* We need two temporaries with fd.loop.v type (istart/iend)
2123 and then (fd.collapse - 1) temporaries with the same
2124 type for count2 ... countN-1 vars if not constant. */
2125 size_t count = 2, i;
2126 tree type = fd.iter_type;
2127 if (fd.collapse > 1
2128 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2130 count += fd.collapse - 1;
2131 /* If there are lastprivate clauses on the inner
2132 GIMPLE_OMP_FOR, add one more temporaries for the total number
2133 of iterations (product of count1 ... countN-1). */
2134 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2135 OMP_CLAUSE_LASTPRIVATE)
2136 || (msk == GF_OMP_FOR_KIND_FOR
2137 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2138 OMP_CLAUSE_LASTPRIVATE)))
2140 tree temp = create_tmp_var (type);
2141 tree c = build_omp_clause (UNKNOWN_LOCATION,
2142 OMP_CLAUSE__LOOPTEMP_);
2143 insert_decl_map (&outer_ctx->cb, temp, temp);
2144 OMP_CLAUSE_DECL (c) = temp;
2145 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2146 gimple_omp_taskreg_set_clauses (stmt, c);
2148 if (fd.non_rect
2149 && fd.last_nonrect == fd.first_nonrect + 1)
2150 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2151 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2153 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2154 tree type2 = TREE_TYPE (v);
2155 count++;
2156 for (i = 0; i < 3; i++)
2158 tree temp = create_tmp_var (type2);
2159 tree c = build_omp_clause (UNKNOWN_LOCATION,
2160 OMP_CLAUSE__LOOPTEMP_);
2161 insert_decl_map (&outer_ctx->cb, temp, temp);
2162 OMP_CLAUSE_DECL (c) = temp;
2163 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2164 gimple_omp_taskreg_set_clauses (stmt, c);
2168 for (i = 0; i < count; i++)
2170 tree temp = create_tmp_var (type);
2171 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2172 insert_decl_map (&outer_ctx->cb, temp, temp);
2173 OMP_CLAUSE_DECL (c) = temp;
2174 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2175 gimple_omp_taskreg_set_clauses (stmt, c);
2178 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2179 && omp_find_clause (gimple_omp_task_clauses (stmt),
2180 OMP_CLAUSE_REDUCTION))
2182 tree type = build_pointer_type (pointer_sized_int_node);
2183 tree temp = create_tmp_var (type);
2184 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2185 insert_decl_map (&outer_ctx->cb, temp, temp);
2186 OMP_CLAUSE_DECL (c) = temp;
2187 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2188 gimple_omp_task_set_clauses (stmt, c);
2192 /* Scan an OpenMP parallel directive. */
2194 static void
2195 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2197 omp_context *ctx;
2198 tree name;
2199 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2201 /* Ignore parallel directives with empty bodies, unless there
2202 are copyin clauses. */
2203 if (optimize > 0
2204 && empty_body_p (gimple_omp_body (stmt))
2205 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2206 OMP_CLAUSE_COPYIN) == NULL)
2208 gsi_replace (gsi, gimple_build_nop (), false);
2209 return;
2212 if (gimple_omp_parallel_combined_p (stmt))
2213 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
2214 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2215 OMP_CLAUSE_REDUCTION);
2216 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2217 if (OMP_CLAUSE_REDUCTION_TASK (c))
2219 tree type = build_pointer_type (pointer_sized_int_node);
2220 tree temp = create_tmp_var (type);
2221 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2222 if (outer_ctx)
2223 insert_decl_map (&outer_ctx->cb, temp, temp);
2224 OMP_CLAUSE_DECL (c) = temp;
2225 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2226 gimple_omp_parallel_set_clauses (stmt, c);
2227 break;
2229 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2230 break;
2232 ctx = new_omp_context (stmt, outer_ctx);
2233 taskreg_contexts.safe_push (ctx);
2234 if (taskreg_nesting_level > 1)
2235 ctx->is_nested = true;
2236 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2237 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2238 name = create_tmp_var_name (".omp_data_s");
2239 name = build_decl (gimple_location (stmt),
2240 TYPE_DECL, name, ctx->record_type);
2241 DECL_ARTIFICIAL (name) = 1;
2242 DECL_NAMELESS (name) = 1;
2243 TYPE_NAME (ctx->record_type) = name;
2244 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2245 create_omp_child_function (ctx, false);
2246 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2248 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2249 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2251 if (TYPE_FIELDS (ctx->record_type) == NULL)
2252 ctx->record_type = ctx->receiver_decl = NULL;
2255 /* Scan an OpenMP task directive. */
2257 static void
2258 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2260 omp_context *ctx;
2261 tree name, t;
2262 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2264 /* Ignore task directives with empty bodies, unless they have depend
2265 clause. */
2266 if (optimize > 0
2267 && gimple_omp_body (stmt)
2268 && empty_body_p (gimple_omp_body (stmt))
2269 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2271 gsi_replace (gsi, gimple_build_nop (), false);
2272 return;
2275 if (gimple_omp_task_taskloop_p (stmt))
2276 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2278 ctx = new_omp_context (stmt, outer_ctx);
2280 if (gimple_omp_task_taskwait_p (stmt))
2282 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2283 return;
2286 taskreg_contexts.safe_push (ctx);
2287 if (taskreg_nesting_level > 1)
2288 ctx->is_nested = true;
2289 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2290 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2291 name = create_tmp_var_name (".omp_data_s");
2292 name = build_decl (gimple_location (stmt),
2293 TYPE_DECL, name, ctx->record_type);
2294 DECL_ARTIFICIAL (name) = 1;
2295 DECL_NAMELESS (name) = 1;
2296 TYPE_NAME (ctx->record_type) = name;
2297 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2298 create_omp_child_function (ctx, false);
2299 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2301 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2303 if (ctx->srecord_type)
2305 name = create_tmp_var_name (".omp_data_a");
2306 name = build_decl (gimple_location (stmt),
2307 TYPE_DECL, name, ctx->srecord_type);
2308 DECL_ARTIFICIAL (name) = 1;
2309 DECL_NAMELESS (name) = 1;
2310 TYPE_NAME (ctx->srecord_type) = name;
2311 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2312 create_omp_child_function (ctx, true);
2315 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2317 if (TYPE_FIELDS (ctx->record_type) == NULL)
2319 ctx->record_type = ctx->receiver_decl = NULL;
2320 t = build_int_cst (long_integer_type_node, 0);
2321 gimple_omp_task_set_arg_size (stmt, t);
2322 t = build_int_cst (long_integer_type_node, 1);
2323 gimple_omp_task_set_arg_align (stmt, t);
2327 /* Helper function for finish_taskreg_scan, called through walk_tree.
2328 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2329 tree, replace it in the expression. */
2331 static tree
2332 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2334 if (VAR_P (*tp))
2336 omp_context *ctx = (omp_context *) data;
2337 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2338 if (t != *tp)
2340 if (DECL_HAS_VALUE_EXPR_P (t))
2341 t = unshare_expr (DECL_VALUE_EXPR (t));
2342 *tp = t;
2344 *walk_subtrees = 0;
2346 else if (IS_TYPE_OR_DECL_P (*tp))
2347 *walk_subtrees = 0;
2348 return NULL_TREE;
2351 /* If any decls have been made addressable during scan_omp,
2352 adjust their fields if needed, and layout record types
2353 of parallel/task constructs. */
2355 static void
2356 finish_taskreg_scan (omp_context *ctx)
2358 if (ctx->record_type == NULL_TREE)
2359 return;
2361 /* If any task_shared_vars were needed, verify all
2362 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2363 statements if use_pointer_for_field hasn't changed
2364 because of that. If it did, update field types now. */
2365 if (task_shared_vars)
2367 tree c;
2369 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2370 c; c = OMP_CLAUSE_CHAIN (c))
2371 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2372 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2374 tree decl = OMP_CLAUSE_DECL (c);
2376 /* Global variables don't need to be copied,
2377 the receiver side will use them directly. */
2378 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2379 continue;
2380 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2381 || !use_pointer_for_field (decl, ctx))
2382 continue;
2383 tree field = lookup_field (decl, ctx);
2384 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2385 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2386 continue;
2387 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2388 TREE_THIS_VOLATILE (field) = 0;
2389 DECL_USER_ALIGN (field) = 0;
2390 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2391 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2392 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2393 if (ctx->srecord_type)
2395 tree sfield = lookup_sfield (decl, ctx);
2396 TREE_TYPE (sfield) = TREE_TYPE (field);
2397 TREE_THIS_VOLATILE (sfield) = 0;
2398 DECL_USER_ALIGN (sfield) = 0;
2399 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2400 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2401 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2406 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2408 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2409 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2410 if (c)
2412 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2413 expects to find it at the start of data. */
2414 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2415 tree *p = &TYPE_FIELDS (ctx->record_type);
2416 while (*p)
2417 if (*p == f)
2419 *p = DECL_CHAIN (*p);
2420 break;
2422 else
2423 p = &DECL_CHAIN (*p);
2424 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2425 TYPE_FIELDS (ctx->record_type) = f;
2427 layout_type (ctx->record_type);
2428 fixup_child_record_type (ctx);
2430 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2432 layout_type (ctx->record_type);
2433 fixup_child_record_type (ctx);
2435 else
2437 location_t loc = gimple_location (ctx->stmt);
2438 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2439 tree detach_clause
2440 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2441 OMP_CLAUSE_DETACH);
2442 /* Move VLA fields to the end. */
2443 p = &TYPE_FIELDS (ctx->record_type);
2444 while (*p)
2445 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2446 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2448 *q = *p;
2449 *p = TREE_CHAIN (*p);
2450 TREE_CHAIN (*q) = NULL_TREE;
2451 q = &TREE_CHAIN (*q);
2453 else
2454 p = &DECL_CHAIN (*p);
2455 *p = vla_fields;
2456 if (gimple_omp_task_taskloop_p (ctx->stmt))
2458 /* Move fields corresponding to first and second _looptemp_
2459 clause first. There are filled by GOMP_taskloop
2460 and thus need to be in specific positions. */
2461 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2462 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2463 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2464 OMP_CLAUSE__LOOPTEMP_);
2465 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2466 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2467 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2468 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2469 p = &TYPE_FIELDS (ctx->record_type);
2470 while (*p)
2471 if (*p == f1 || *p == f2 || *p == f3)
2472 *p = DECL_CHAIN (*p);
2473 else
2474 p = &DECL_CHAIN (*p);
2475 DECL_CHAIN (f1) = f2;
2476 if (c3)
2478 DECL_CHAIN (f2) = f3;
2479 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2481 else
2482 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2483 TYPE_FIELDS (ctx->record_type) = f1;
2484 if (ctx->srecord_type)
2486 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2487 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2488 if (c3)
2489 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2490 p = &TYPE_FIELDS (ctx->srecord_type);
2491 while (*p)
2492 if (*p == f1 || *p == f2 || *p == f3)
2493 *p = DECL_CHAIN (*p);
2494 else
2495 p = &DECL_CHAIN (*p);
2496 DECL_CHAIN (f1) = f2;
2497 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2498 if (c3)
2500 DECL_CHAIN (f2) = f3;
2501 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2503 else
2504 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2505 TYPE_FIELDS (ctx->srecord_type) = f1;
2508 if (detach_clause)
2510 tree c, field;
2512 /* Look for a firstprivate clause with the detach event handle. */
2513 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2514 c; c = OMP_CLAUSE_CHAIN (c))
2516 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2517 continue;
2518 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2519 == OMP_CLAUSE_DECL (detach_clause))
2520 break;
2523 gcc_assert (c);
2524 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2526 /* Move field corresponding to the detach clause first.
2527 This is filled by GOMP_task and needs to be in a
2528 specific position. */
2529 p = &TYPE_FIELDS (ctx->record_type);
2530 while (*p)
2531 if (*p == field)
2532 *p = DECL_CHAIN (*p);
2533 else
2534 p = &DECL_CHAIN (*p);
2535 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2536 TYPE_FIELDS (ctx->record_type) = field;
2537 if (ctx->srecord_type)
2539 field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2540 p = &TYPE_FIELDS (ctx->srecord_type);
2541 while (*p)
2542 if (*p == field)
2543 *p = DECL_CHAIN (*p);
2544 else
2545 p = &DECL_CHAIN (*p);
2546 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2547 TYPE_FIELDS (ctx->srecord_type) = field;
2550 layout_type (ctx->record_type);
2551 fixup_child_record_type (ctx);
2552 if (ctx->srecord_type)
2553 layout_type (ctx->srecord_type);
2554 tree t = fold_convert_loc (loc, long_integer_type_node,
2555 TYPE_SIZE_UNIT (ctx->record_type));
2556 if (TREE_CODE (t) != INTEGER_CST)
2558 t = unshare_expr (t);
2559 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2561 gimple_omp_task_set_arg_size (ctx->stmt, t);
2562 t = build_int_cst (long_integer_type_node,
2563 TYPE_ALIGN_UNIT (ctx->record_type));
2564 gimple_omp_task_set_arg_align (ctx->stmt, t);
2568 /* Find the enclosing offload context. */
2570 static omp_context *
2571 enclosing_target_ctx (omp_context *ctx)
2573 for (; ctx; ctx = ctx->outer)
2574 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2575 break;
2577 return ctx;
2580 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2581 construct.
2582 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2584 static bool
2585 ctx_in_oacc_kernels_region (omp_context *ctx)
2587 for (;ctx != NULL; ctx = ctx->outer)
2589 gimple *stmt = ctx->stmt;
2590 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2591 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2592 return true;
2595 return false;
2598 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2599 (This doesn't include OpenACC 'kernels' decomposed parts.)
2600 Until kernels handling moves to use the same loop indirection
2601 scheme as parallel, we need to do this checking early. */
2603 static unsigned
2604 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2606 bool checking = true;
2607 unsigned outer_mask = 0;
2608 unsigned this_mask = 0;
2609 bool has_seq = false, has_auto = false;
2611 if (ctx->outer)
2612 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2613 if (!stmt)
2615 checking = false;
2616 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2617 return outer_mask;
2618 stmt = as_a <gomp_for *> (ctx->stmt);
2621 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2623 switch (OMP_CLAUSE_CODE (c))
2625 case OMP_CLAUSE_GANG:
2626 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2627 break;
2628 case OMP_CLAUSE_WORKER:
2629 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2630 break;
2631 case OMP_CLAUSE_VECTOR:
2632 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2633 break;
2634 case OMP_CLAUSE_SEQ:
2635 has_seq = true;
2636 break;
2637 case OMP_CLAUSE_AUTO:
2638 has_auto = true;
2639 break;
2640 default:
2641 break;
2645 if (checking)
2647 if (has_seq && (this_mask || has_auto))
2648 error_at (gimple_location (stmt), "%<seq%> overrides other"
2649 " OpenACC loop specifiers");
2650 else if (has_auto && this_mask)
2651 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2652 " OpenACC loop specifiers");
2654 if (this_mask & outer_mask)
2655 error_at (gimple_location (stmt), "inner loop uses same"
2656 " OpenACC parallelism as containing loop");
2659 return outer_mask | this_mask;
2662 /* Scan a GIMPLE_OMP_FOR. */
2664 static omp_context *
2665 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2667 omp_context *ctx;
2668 size_t i;
2669 tree clauses = gimple_omp_for_clauses (stmt);
2671 ctx = new_omp_context (stmt, outer_ctx);
2673 if (is_gimple_omp_oacc (stmt))
2675 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2677 if (!(tgt && is_oacc_kernels (tgt)))
2678 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2680 tree c_op0;
2681 switch (OMP_CLAUSE_CODE (c))
2683 case OMP_CLAUSE_GANG:
2684 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2685 break;
2687 case OMP_CLAUSE_WORKER:
2688 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2689 break;
2691 case OMP_CLAUSE_VECTOR:
2692 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2693 break;
2695 default:
2696 continue;
2699 if (c_op0)
2701 /* By construction, this is impossible for OpenACC 'kernels'
2702 decomposed parts. */
2703 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2705 error_at (OMP_CLAUSE_LOCATION (c),
2706 "argument not permitted on %qs clause",
2707 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2708 if (tgt)
2709 inform (gimple_location (tgt->stmt),
2710 "enclosing parent compute construct");
2711 else if (oacc_get_fn_attrib (current_function_decl))
2712 inform (DECL_SOURCE_LOCATION (current_function_decl),
2713 "enclosing routine");
2714 else
2715 gcc_unreachable ();
2719 if (tgt && is_oacc_kernels (tgt))
2720 check_oacc_kernel_gwv (stmt, ctx);
2722 /* Collect all variables named in reductions on this loop. Ensure
2723 that, if this loop has a reduction on some variable v, and there is
2724 a reduction on v somewhere in an outer context, then there is a
2725 reduction on v on all intervening loops as well. */
2726 tree local_reduction_clauses = NULL;
2727 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2729 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2730 local_reduction_clauses
2731 = tree_cons (NULL, c, local_reduction_clauses);
2733 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2734 ctx->outer_reduction_clauses
2735 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2736 ctx->outer->outer_reduction_clauses);
2737 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2738 tree local_iter = local_reduction_clauses;
2739 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2741 tree local_clause = TREE_VALUE (local_iter);
2742 tree local_var = OMP_CLAUSE_DECL (local_clause);
2743 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2744 bool have_outer_reduction = false;
2745 tree ctx_iter = outer_reduction_clauses;
2746 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2748 tree outer_clause = TREE_VALUE (ctx_iter);
2749 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2750 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2751 if (outer_var == local_var && outer_op != local_op)
2753 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2754 "conflicting reduction operations for %qE",
2755 local_var);
2756 inform (OMP_CLAUSE_LOCATION (outer_clause),
2757 "location of the previous reduction for %qE",
2758 outer_var);
2760 if (outer_var == local_var)
2762 have_outer_reduction = true;
2763 break;
2766 if (have_outer_reduction)
2768 /* There is a reduction on outer_var both on this loop and on
2769 some enclosing loop. Walk up the context tree until such a
2770 loop with a reduction on outer_var is found, and complain
2771 about all intervening loops that do not have such a
2772 reduction. */
2773 struct omp_context *curr_loop = ctx->outer;
2774 bool found = false;
2775 while (curr_loop != NULL)
2777 tree curr_iter = curr_loop->local_reduction_clauses;
2778 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2780 tree curr_clause = TREE_VALUE (curr_iter);
2781 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2782 if (curr_var == local_var)
2784 found = true;
2785 break;
2788 if (!found)
2789 warning_at (gimple_location (curr_loop->stmt), 0,
2790 "nested loop in reduction needs "
2791 "reduction clause for %qE",
2792 local_var);
2793 else
2794 break;
2795 curr_loop = curr_loop->outer;
2799 ctx->local_reduction_clauses = local_reduction_clauses;
2800 ctx->outer_reduction_clauses
2801 = chainon (unshare_expr (ctx->local_reduction_clauses),
2802 ctx->outer_reduction_clauses);
2804 if (tgt && is_oacc_kernels (tgt))
2806 /* Strip out reductions, as they are not handled yet. */
2807 tree *prev_ptr = &clauses;
2809 while (tree probe = *prev_ptr)
2811 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2813 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2814 *prev_ptr = *next_ptr;
2815 else
2816 prev_ptr = next_ptr;
2819 gimple_omp_for_set_clauses (stmt, clauses);
2823 scan_sharing_clauses (clauses, ctx);
2825 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2826 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2828 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2829 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2830 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2831 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2833 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2834 return ctx;
2837 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2839 static void
2840 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2841 omp_context *outer_ctx)
2843 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2844 gsi_replace (gsi, bind, false);
2845 gimple_seq seq = NULL;
2846 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2847 tree cond = create_tmp_var_raw (integer_type_node);
2848 DECL_CONTEXT (cond) = current_function_decl;
2849 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2850 gimple_bind_set_vars (bind, cond);
2851 gimple_call_set_lhs (g, cond);
2852 gimple_seq_add_stmt (&seq, g);
2853 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2854 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2855 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2856 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2857 gimple_seq_add_stmt (&seq, g);
2858 g = gimple_build_label (lab1);
2859 gimple_seq_add_stmt (&seq, g);
2860 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2861 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2862 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2863 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2864 gimple_omp_for_set_clauses (new_stmt, clause);
2865 gimple_seq_add_stmt (&seq, new_stmt);
2866 g = gimple_build_goto (lab3);
2867 gimple_seq_add_stmt (&seq, g);
2868 g = gimple_build_label (lab2);
2869 gimple_seq_add_stmt (&seq, g);
2870 gimple_seq_add_stmt (&seq, stmt);
2871 g = gimple_build_label (lab3);
2872 gimple_seq_add_stmt (&seq, g);
2873 gimple_bind_set_body (bind, seq);
2874 update_stmt (bind);
2875 scan_omp_for (new_stmt, outer_ctx);
2876 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2879 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2880 struct walk_stmt_info *);
2881 static omp_context *maybe_lookup_ctx (gimple *);
2883 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2884 for scan phase loop. */
2886 static void
2887 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2888 omp_context *outer_ctx)
2890 /* The only change between inclusive and exclusive scan will be
2891 within the first simd loop, so just use inclusive in the
2892 worksharing loop. */
2893 outer_ctx->scan_inclusive = true;
2894 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2895 OMP_CLAUSE_DECL (c) = integer_zero_node;
2897 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2898 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2899 gsi_replace (gsi, input_stmt, false);
2900 gimple_seq input_body = NULL;
2901 gimple_seq_add_stmt (&input_body, stmt);
2902 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2904 gimple_stmt_iterator input1_gsi = gsi_none ();
2905 struct walk_stmt_info wi;
2906 memset (&wi, 0, sizeof (wi));
2907 wi.val_only = true;
2908 wi.info = (void *) &input1_gsi;
2909 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2910 gcc_assert (!gsi_end_p (input1_gsi));
2912 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2913 gsi_next (&input1_gsi);
2914 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2915 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2916 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2917 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2918 std::swap (input_stmt1, scan_stmt1);
2920 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2921 gimple_omp_set_body (input_stmt1, NULL);
2923 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2924 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2926 gimple_omp_set_body (input_stmt1, input_body1);
2927 gimple_omp_set_body (scan_stmt1, NULL);
2929 gimple_stmt_iterator input2_gsi = gsi_none ();
2930 memset (&wi, 0, sizeof (wi));
2931 wi.val_only = true;
2932 wi.info = (void *) &input2_gsi;
2933 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2934 NULL, &wi);
2935 gcc_assert (!gsi_end_p (input2_gsi));
2937 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2938 gsi_next (&input2_gsi);
2939 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2940 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2941 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2942 std::swap (input_stmt2, scan_stmt2);
2944 gimple_omp_set_body (input_stmt2, NULL);
2946 gimple_omp_set_body (input_stmt, input_body);
2947 gimple_omp_set_body (scan_stmt, scan_body);
2949 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2950 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2952 ctx = new_omp_context (scan_stmt, outer_ctx);
2953 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2955 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
2958 /* Scan an OpenMP sections directive. */
2960 static void
2961 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2963 omp_context *ctx;
2965 ctx = new_omp_context (stmt, outer_ctx);
2966 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2967 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2970 /* Scan an OpenMP single directive. */
2972 static void
2973 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2975 omp_context *ctx;
2976 tree name;
2978 ctx = new_omp_context (stmt, outer_ctx);
2979 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2980 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2981 name = create_tmp_var_name (".omp_copy_s");
2982 name = build_decl (gimple_location (stmt),
2983 TYPE_DECL, name, ctx->record_type);
2984 TYPE_NAME (ctx->record_type) = name;
2986 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2987 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2989 if (TYPE_FIELDS (ctx->record_type) == NULL)
2990 ctx->record_type = NULL;
2991 else
2992 layout_type (ctx->record_type);
2995 /* Scan a GIMPLE_OMP_TARGET. */
2997 static void
2998 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
3000 omp_context *ctx;
3001 tree name;
3002 bool offloaded = is_gimple_omp_offloaded (stmt);
3003 tree clauses = gimple_omp_target_clauses (stmt);
3005 ctx = new_omp_context (stmt, outer_ctx);
3006 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3007 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3008 name = create_tmp_var_name (".omp_data_t");
3009 name = build_decl (gimple_location (stmt),
3010 TYPE_DECL, name, ctx->record_type);
3011 DECL_ARTIFICIAL (name) = 1;
3012 DECL_NAMELESS (name) = 1;
3013 TYPE_NAME (ctx->record_type) = name;
3014 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3016 if (offloaded)
3018 create_omp_child_function (ctx, false);
3019 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
3022 scan_sharing_clauses (clauses, ctx);
3023 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3025 if (TYPE_FIELDS (ctx->record_type) == NULL)
3026 ctx->record_type = ctx->receiver_decl = NULL;
3027 else
3029 TYPE_FIELDS (ctx->record_type)
3030 = nreverse (TYPE_FIELDS (ctx->record_type));
3031 if (flag_checking)
3033 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
3034 for (tree field = TYPE_FIELDS (ctx->record_type);
3035 field;
3036 field = DECL_CHAIN (field))
3037 gcc_assert (DECL_ALIGN (field) == align);
3039 layout_type (ctx->record_type);
3040 if (offloaded)
3041 fixup_child_record_type (ctx);
3044 if (ctx->teams_nested_p && ctx->nonteams_nested_p)
3046 error_at (gimple_location (stmt),
3047 "%<target%> construct with nested %<teams%> construct "
3048 "contains directives outside of the %<teams%> construct");
3049 gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
3053 /* Scan an OpenMP teams directive. */
3055 static void
3056 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
3058 omp_context *ctx = new_omp_context (stmt, outer_ctx);
3060 if (!gimple_omp_teams_host (stmt))
3062 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3063 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3064 return;
3066 taskreg_contexts.safe_push (ctx);
3067 gcc_assert (taskreg_nesting_level == 1);
3068 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3069 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3070 tree name = create_tmp_var_name (".omp_data_s");
3071 name = build_decl (gimple_location (stmt),
3072 TYPE_DECL, name, ctx->record_type);
3073 DECL_ARTIFICIAL (name) = 1;
3074 DECL_NAMELESS (name) = 1;
3075 TYPE_NAME (ctx->record_type) = name;
3076 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3077 create_omp_child_function (ctx, false);
3078 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
3080 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3081 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3083 if (TYPE_FIELDS (ctx->record_type) == NULL)
3084 ctx->record_type = ctx->receiver_decl = NULL;
3087 /* Check nesting restrictions. */
3088 static bool
3089 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
3091 tree c;
3093 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3094 inside an OpenACC CTX. */
3095 if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3096 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
3097 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3099 else if (!(is_gimple_omp (stmt)
3100 && is_gimple_omp_oacc (stmt)))
3102 if (oacc_get_fn_attrib (cfun->decl) != NULL)
3104 error_at (gimple_location (stmt),
3105 "non-OpenACC construct inside of OpenACC routine");
3106 return false;
3108 else
3109 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
3110 if (is_gimple_omp (octx->stmt)
3111 && is_gimple_omp_oacc (octx->stmt))
3113 error_at (gimple_location (stmt),
3114 "non-OpenACC construct inside of OpenACC region");
3115 return false;
3119 if (ctx != NULL)
3121 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
3122 && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
3124 c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
3125 OMP_CLAUSE_DEVICE);
3126 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3128 error_at (gimple_location (stmt),
3129 "OpenMP constructs are not allowed in target region "
3130 "with %<ancestor%>");
3131 return false;
3134 if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
3135 ctx->teams_nested_p = true;
3136 else
3137 ctx->nonteams_nested_p = true;
3139 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
3140 && ctx->outer
3141 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
3142 ctx = ctx->outer;
3143 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3144 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3145 && !ctx->loop_p)
3147 c = NULL_TREE;
3148 if (ctx->order_concurrent
3149 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
3150 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3151 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3153 error_at (gimple_location (stmt),
3154 "OpenMP constructs other than %<parallel%>, %<loop%>"
3155 " or %<simd%> may not be nested inside a region with"
3156 " the %<order(concurrent)%> clause");
3157 return false;
3159 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
3161 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3162 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3164 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
3165 && (ctx->outer == NULL
3166 || !gimple_omp_for_combined_into_p (ctx->stmt)
3167 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
3168 || (gimple_omp_for_kind (ctx->outer->stmt)
3169 != GF_OMP_FOR_KIND_FOR)
3170 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
3172 error_at (gimple_location (stmt),
3173 "%<ordered simd threads%> must be closely "
3174 "nested inside of %<%s simd%> region",
3175 lang_GNU_Fortran () ? "do" : "for");
3176 return false;
3178 return true;
3181 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3182 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
3183 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
3184 return true;
3185 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
3186 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3187 return true;
3188 error_at (gimple_location (stmt),
3189 "OpenMP constructs other than "
3190 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3191 "not be nested inside %<simd%> region");
3192 return false;
3194 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3196 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
3197 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
3198 && omp_find_clause (gimple_omp_for_clauses (stmt),
3199 OMP_CLAUSE_BIND) == NULL_TREE))
3200 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
3202 error_at (gimple_location (stmt),
3203 "only %<distribute%>, %<parallel%> or %<loop%> "
3204 "regions are allowed to be strictly nested inside "
3205 "%<teams%> region");
3206 return false;
3209 else if (ctx->order_concurrent
3210 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3211 && (gimple_code (stmt) != GIMPLE_OMP_FOR
3212 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3213 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3215 if (ctx->loop_p)
3216 error_at (gimple_location (stmt),
3217 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3218 "%<simd%> may not be nested inside a %<loop%> region");
3219 else
3220 error_at (gimple_location (stmt),
3221 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3222 "%<simd%> may not be nested inside a region with "
3223 "the %<order(concurrent)%> clause");
3224 return false;
3227 switch (gimple_code (stmt))
3229 case GIMPLE_OMP_FOR:
3230 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3231 return true;
3232 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3234 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3236 error_at (gimple_location (stmt),
3237 "%<distribute%> region must be strictly nested "
3238 "inside %<teams%> construct");
3239 return false;
3241 return true;
3243 /* We split taskloop into task and nested taskloop in it. */
3244 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3245 return true;
3246 /* For now, hope this will change and loop bind(parallel) will not
3247 be allowed in lots of contexts. */
3248 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3249 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3250 return true;
3251 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3253 bool ok = false;
3255 if (ctx)
3256 switch (gimple_code (ctx->stmt))
3258 case GIMPLE_OMP_FOR:
3259 ok = (gimple_omp_for_kind (ctx->stmt)
3260 == GF_OMP_FOR_KIND_OACC_LOOP);
3261 break;
3263 case GIMPLE_OMP_TARGET:
3264 switch (gimple_omp_target_kind (ctx->stmt))
3266 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3267 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3268 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3269 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3270 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3271 ok = true;
3272 break;
3274 default:
3275 break;
3278 default:
3279 break;
3281 else if (oacc_get_fn_attrib (current_function_decl))
3282 ok = true;
3283 if (!ok)
3285 error_at (gimple_location (stmt),
3286 "OpenACC loop directive must be associated with"
3287 " an OpenACC compute region");
3288 return false;
3291 /* FALLTHRU */
3292 case GIMPLE_CALL:
3293 if (is_gimple_call (stmt)
3294 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3295 == BUILT_IN_GOMP_CANCEL
3296 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3297 == BUILT_IN_GOMP_CANCELLATION_POINT))
3299 const char *bad = NULL;
3300 const char *kind = NULL;
3301 const char *construct
3302 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3303 == BUILT_IN_GOMP_CANCEL)
3304 ? "cancel"
3305 : "cancellation point";
3306 if (ctx == NULL)
3308 error_at (gimple_location (stmt), "orphaned %qs construct",
3309 construct);
3310 return false;
3312 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3313 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3314 : 0)
3316 case 1:
3317 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3318 bad = "parallel";
3319 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3320 == BUILT_IN_GOMP_CANCEL
3321 && !integer_zerop (gimple_call_arg (stmt, 1)))
3322 ctx->cancellable = true;
3323 kind = "parallel";
3324 break;
3325 case 2:
3326 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3327 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3328 bad = "for";
3329 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3330 == BUILT_IN_GOMP_CANCEL
3331 && !integer_zerop (gimple_call_arg (stmt, 1)))
3333 ctx->cancellable = true;
3334 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3335 OMP_CLAUSE_NOWAIT))
3336 warning_at (gimple_location (stmt), 0,
3337 "%<cancel for%> inside "
3338 "%<nowait%> for construct");
3339 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3340 OMP_CLAUSE_ORDERED))
3341 warning_at (gimple_location (stmt), 0,
3342 "%<cancel for%> inside "
3343 "%<ordered%> for construct");
3345 kind = "for";
3346 break;
3347 case 4:
3348 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3349 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3350 bad = "sections";
3351 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3352 == BUILT_IN_GOMP_CANCEL
3353 && !integer_zerop (gimple_call_arg (stmt, 1)))
3355 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3357 ctx->cancellable = true;
3358 if (omp_find_clause (gimple_omp_sections_clauses
3359 (ctx->stmt),
3360 OMP_CLAUSE_NOWAIT))
3361 warning_at (gimple_location (stmt), 0,
3362 "%<cancel sections%> inside "
3363 "%<nowait%> sections construct");
3365 else
3367 gcc_assert (ctx->outer
3368 && gimple_code (ctx->outer->stmt)
3369 == GIMPLE_OMP_SECTIONS);
3370 ctx->outer->cancellable = true;
3371 if (omp_find_clause (gimple_omp_sections_clauses
3372 (ctx->outer->stmt),
3373 OMP_CLAUSE_NOWAIT))
3374 warning_at (gimple_location (stmt), 0,
3375 "%<cancel sections%> inside "
3376 "%<nowait%> sections construct");
3379 kind = "sections";
3380 break;
3381 case 8:
3382 if (!is_task_ctx (ctx)
3383 && (!is_taskloop_ctx (ctx)
3384 || ctx->outer == NULL
3385 || !is_task_ctx (ctx->outer)))
3386 bad = "task";
3387 else
3389 for (omp_context *octx = ctx->outer;
3390 octx; octx = octx->outer)
3392 switch (gimple_code (octx->stmt))
3394 case GIMPLE_OMP_TASKGROUP:
3395 break;
3396 case GIMPLE_OMP_TARGET:
3397 if (gimple_omp_target_kind (octx->stmt)
3398 != GF_OMP_TARGET_KIND_REGION)
3399 continue;
3400 /* FALLTHRU */
3401 case GIMPLE_OMP_PARALLEL:
3402 case GIMPLE_OMP_TEAMS:
3403 error_at (gimple_location (stmt),
3404 "%<%s taskgroup%> construct not closely "
3405 "nested inside of %<taskgroup%> region",
3406 construct);
3407 return false;
3408 case GIMPLE_OMP_TASK:
3409 if (gimple_omp_task_taskloop_p (octx->stmt)
3410 && octx->outer
3411 && is_taskloop_ctx (octx->outer))
3413 tree clauses
3414 = gimple_omp_for_clauses (octx->outer->stmt);
3415 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3416 break;
3418 continue;
3419 default:
3420 continue;
3422 break;
3424 ctx->cancellable = true;
3426 kind = "taskgroup";
3427 break;
3428 default:
3429 error_at (gimple_location (stmt), "invalid arguments");
3430 return false;
3432 if (bad)
3434 error_at (gimple_location (stmt),
3435 "%<%s %s%> construct not closely nested inside of %qs",
3436 construct, kind, bad);
3437 return false;
3440 /* FALLTHRU */
3441 case GIMPLE_OMP_SECTIONS:
3442 case GIMPLE_OMP_SINGLE:
3443 for (; ctx != NULL; ctx = ctx->outer)
3444 switch (gimple_code (ctx->stmt))
3446 case GIMPLE_OMP_FOR:
3447 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3448 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3449 break;
3450 /* FALLTHRU */
3451 case GIMPLE_OMP_SECTIONS:
3452 case GIMPLE_OMP_SINGLE:
3453 case GIMPLE_OMP_ORDERED:
3454 case GIMPLE_OMP_MASTER:
3455 case GIMPLE_OMP_MASKED:
3456 case GIMPLE_OMP_TASK:
3457 case GIMPLE_OMP_CRITICAL:
3458 if (is_gimple_call (stmt))
3460 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3461 != BUILT_IN_GOMP_BARRIER)
3462 return true;
3463 error_at (gimple_location (stmt),
3464 "barrier region may not be closely nested inside "
3465 "of work-sharing, %<loop%>, %<critical%>, "
3466 "%<ordered%>, %<master%>, %<masked%>, explicit "
3467 "%<task%> or %<taskloop%> region");
3468 return false;
3470 error_at (gimple_location (stmt),
3471 "work-sharing region may not be closely nested inside "
3472 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3473 "%<master%>, %<masked%>, explicit %<task%> or "
3474 "%<taskloop%> region");
3475 return false;
3476 case GIMPLE_OMP_PARALLEL:
3477 case GIMPLE_OMP_TEAMS:
3478 return true;
3479 case GIMPLE_OMP_TARGET:
3480 if (gimple_omp_target_kind (ctx->stmt)
3481 == GF_OMP_TARGET_KIND_REGION)
3482 return true;
3483 break;
3484 default:
3485 break;
3487 break;
3488 case GIMPLE_OMP_MASTER:
3489 case GIMPLE_OMP_MASKED:
3490 for (; ctx != NULL; ctx = ctx->outer)
3491 switch (gimple_code (ctx->stmt))
3493 case GIMPLE_OMP_FOR:
3494 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3495 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3496 break;
3497 /* FALLTHRU */
3498 case GIMPLE_OMP_SECTIONS:
3499 case GIMPLE_OMP_SINGLE:
3500 case GIMPLE_OMP_TASK:
3501 error_at (gimple_location (stmt),
3502 "%qs region may not be closely nested inside "
3503 "of work-sharing, %<loop%>, explicit %<task%> or "
3504 "%<taskloop%> region",
3505 gimple_code (stmt) == GIMPLE_OMP_MASTER
3506 ? "master" : "masked");
3507 return false;
3508 case GIMPLE_OMP_PARALLEL:
3509 case GIMPLE_OMP_TEAMS:
3510 return true;
3511 case GIMPLE_OMP_TARGET:
3512 if (gimple_omp_target_kind (ctx->stmt)
3513 == GF_OMP_TARGET_KIND_REGION)
3514 return true;
3515 break;
3516 default:
3517 break;
3519 break;
3520 case GIMPLE_OMP_SCOPE:
3521 for (; ctx != NULL; ctx = ctx->outer)
3522 switch (gimple_code (ctx->stmt))
3524 case GIMPLE_OMP_FOR:
3525 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3526 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3527 break;
3528 /* FALLTHRU */
3529 case GIMPLE_OMP_SECTIONS:
3530 case GIMPLE_OMP_SINGLE:
3531 case GIMPLE_OMP_TASK:
3532 case GIMPLE_OMP_CRITICAL:
3533 case GIMPLE_OMP_ORDERED:
3534 case GIMPLE_OMP_MASTER:
3535 case GIMPLE_OMP_MASKED:
3536 error_at (gimple_location (stmt),
3537 "%<scope%> region may not be closely nested inside "
3538 "of work-sharing, %<loop%>, explicit %<task%>, "
3539 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3540 "or %<masked%> region");
3541 return false;
3542 case GIMPLE_OMP_PARALLEL:
3543 case GIMPLE_OMP_TEAMS:
3544 return true;
3545 case GIMPLE_OMP_TARGET:
3546 if (gimple_omp_target_kind (ctx->stmt)
3547 == GF_OMP_TARGET_KIND_REGION)
3548 return true;
3549 break;
3550 default:
3551 break;
3553 break;
3554 case GIMPLE_OMP_TASK:
3555 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3556 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3557 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3558 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3560 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3561 error_at (OMP_CLAUSE_LOCATION (c),
3562 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3563 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3564 return false;
3566 break;
3567 case GIMPLE_OMP_ORDERED:
3568 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3569 c; c = OMP_CLAUSE_CHAIN (c))
3571 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3573 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3574 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3575 continue;
3577 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3578 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3579 || kind == OMP_CLAUSE_DEPEND_SINK)
3581 tree oclause;
3582 /* Look for containing ordered(N) loop. */
3583 if (ctx == NULL
3584 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3585 || (oclause
3586 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3587 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3589 error_at (OMP_CLAUSE_LOCATION (c),
3590 "%<ordered%> construct with %<depend%> clause "
3591 "must be closely nested inside an %<ordered%> "
3592 "loop");
3593 return false;
3595 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3597 error_at (OMP_CLAUSE_LOCATION (c),
3598 "%<ordered%> construct with %<depend%> clause "
3599 "must be closely nested inside a loop with "
3600 "%<ordered%> clause with a parameter");
3601 return false;
3604 else
3606 error_at (OMP_CLAUSE_LOCATION (c),
3607 "invalid depend kind in omp %<ordered%> %<depend%>");
3608 return false;
3611 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3612 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3614 /* ordered simd must be closely nested inside of simd region,
3615 and simd region must not encounter constructs other than
3616 ordered simd, therefore ordered simd may be either orphaned,
3617 or ctx->stmt must be simd. The latter case is handled already
3618 earlier. */
3619 if (ctx != NULL)
3621 error_at (gimple_location (stmt),
3622 "%<ordered%> %<simd%> must be closely nested inside "
3623 "%<simd%> region");
3624 return false;
3627 for (; ctx != NULL; ctx = ctx->outer)
3628 switch (gimple_code (ctx->stmt))
3630 case GIMPLE_OMP_CRITICAL:
3631 case GIMPLE_OMP_TASK:
3632 case GIMPLE_OMP_ORDERED:
3633 ordered_in_taskloop:
3634 error_at (gimple_location (stmt),
3635 "%<ordered%> region may not be closely nested inside "
3636 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3637 "%<taskloop%> region");
3638 return false;
3639 case GIMPLE_OMP_FOR:
3640 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3641 goto ordered_in_taskloop;
3642 tree o;
3643 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3644 OMP_CLAUSE_ORDERED);
3645 if (o == NULL)
3647 error_at (gimple_location (stmt),
3648 "%<ordered%> region must be closely nested inside "
3649 "a loop region with an %<ordered%> clause");
3650 return false;
3652 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3653 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3655 error_at (gimple_location (stmt),
3656 "%<ordered%> region without %<depend%> clause may "
3657 "not be closely nested inside a loop region with "
3658 "an %<ordered%> clause with a parameter");
3659 return false;
3661 return true;
3662 case GIMPLE_OMP_TARGET:
3663 if (gimple_omp_target_kind (ctx->stmt)
3664 != GF_OMP_TARGET_KIND_REGION)
3665 break;
3666 /* FALLTHRU */
3667 case GIMPLE_OMP_PARALLEL:
3668 case GIMPLE_OMP_TEAMS:
3669 error_at (gimple_location (stmt),
3670 "%<ordered%> region must be closely nested inside "
3671 "a loop region with an %<ordered%> clause");
3672 return false;
3673 default:
3674 break;
3676 break;
3677 case GIMPLE_OMP_CRITICAL:
3679 tree this_stmt_name
3680 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3681 for (; ctx != NULL; ctx = ctx->outer)
3682 if (gomp_critical *other_crit
3683 = dyn_cast <gomp_critical *> (ctx->stmt))
3684 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3686 error_at (gimple_location (stmt),
3687 "%<critical%> region may not be nested inside "
3688 "a %<critical%> region with the same name");
3689 return false;
3692 break;
3693 case GIMPLE_OMP_TEAMS:
3694 if (ctx == NULL)
3695 break;
3696 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3697 || (gimple_omp_target_kind (ctx->stmt)
3698 != GF_OMP_TARGET_KIND_REGION))
3700 /* Teams construct can appear either strictly nested inside of
3701 target construct with no intervening stmts, or can be encountered
3702 only by initial task (so must not appear inside any OpenMP
3703 construct. */
3704 error_at (gimple_location (stmt),
3705 "%<teams%> construct must be closely nested inside of "
3706 "%<target%> construct or not nested in any OpenMP "
3707 "construct");
3708 return false;
3710 break;
3711 case GIMPLE_OMP_TARGET:
3712 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3713 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3714 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3715 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3717 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3718 error_at (OMP_CLAUSE_LOCATION (c),
3719 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3720 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3721 return false;
3723 if (is_gimple_omp_offloaded (stmt)
3724 && oacc_get_fn_attrib (cfun->decl) != NULL)
3726 error_at (gimple_location (stmt),
3727 "OpenACC region inside of OpenACC routine, nested "
3728 "parallelism not supported yet");
3729 return false;
3731 for (; ctx != NULL; ctx = ctx->outer)
3733 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3735 if (is_gimple_omp (stmt)
3736 && is_gimple_omp_oacc (stmt)
3737 && is_gimple_omp (ctx->stmt))
3739 error_at (gimple_location (stmt),
3740 "OpenACC construct inside of non-OpenACC region");
3741 return false;
3743 continue;
3746 const char *stmt_name, *ctx_stmt_name;
3747 switch (gimple_omp_target_kind (stmt))
3749 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3750 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3751 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3752 case GF_OMP_TARGET_KIND_ENTER_DATA:
3753 stmt_name = "target enter data"; break;
3754 case GF_OMP_TARGET_KIND_EXIT_DATA:
3755 stmt_name = "target exit data"; break;
3756 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3757 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3758 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3759 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3760 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3761 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
3762 stmt_name = "enter data"; break;
3763 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
3764 stmt_name = "exit data"; break;
3765 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3766 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3767 break;
3768 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3769 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3770 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3771 /* OpenACC 'kernels' decomposed parts. */
3772 stmt_name = "kernels"; break;
3773 default: gcc_unreachable ();
3775 switch (gimple_omp_target_kind (ctx->stmt))
3777 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3778 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3779 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3780 ctx_stmt_name = "parallel"; break;
3781 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3782 ctx_stmt_name = "kernels"; break;
3783 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3784 ctx_stmt_name = "serial"; break;
3785 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3786 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3787 ctx_stmt_name = "host_data"; break;
3788 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3789 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3790 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3791 /* OpenACC 'kernels' decomposed parts. */
3792 ctx_stmt_name = "kernels"; break;
3793 default: gcc_unreachable ();
3796 /* OpenACC/OpenMP mismatch? */
3797 if (is_gimple_omp_oacc (stmt)
3798 != is_gimple_omp_oacc (ctx->stmt))
3800 error_at (gimple_location (stmt),
3801 "%s %qs construct inside of %s %qs region",
3802 (is_gimple_omp_oacc (stmt)
3803 ? "OpenACC" : "OpenMP"), stmt_name,
3804 (is_gimple_omp_oacc (ctx->stmt)
3805 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3806 return false;
3808 if (is_gimple_omp_offloaded (ctx->stmt))
3810 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3811 if (is_gimple_omp_oacc (ctx->stmt))
3813 error_at (gimple_location (stmt),
3814 "%qs construct inside of %qs region",
3815 stmt_name, ctx_stmt_name);
3816 return false;
3818 else
3820 warning_at (gimple_location (stmt), 0,
3821 "%qs construct inside of %qs region",
3822 stmt_name, ctx_stmt_name);
3826 break;
3827 default:
3828 break;
3830 return true;
3834 /* Helper function scan_omp.
3836 Callback for walk_tree or operators in walk_gimple_stmt used to
3837 scan for OMP directives in TP. */
3839 static tree
3840 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3842 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3843 omp_context *ctx = (omp_context *) wi->info;
3844 tree t = *tp;
3846 switch (TREE_CODE (t))
3848 case VAR_DECL:
3849 case PARM_DECL:
3850 case LABEL_DECL:
3851 case RESULT_DECL:
3852 if (ctx)
3854 tree repl = remap_decl (t, &ctx->cb);
3855 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3856 *tp = repl;
3858 break;
3860 default:
3861 if (ctx && TYPE_P (t))
3862 *tp = remap_type (t, &ctx->cb);
3863 else if (!DECL_P (t))
3865 *walk_subtrees = 1;
3866 if (ctx)
3868 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3869 if (tem != TREE_TYPE (t))
3871 if (TREE_CODE (t) == INTEGER_CST)
3872 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3873 else
3874 TREE_TYPE (t) = tem;
3878 break;
3881 return NULL_TREE;
3884 /* Return true if FNDECL is a setjmp or a longjmp. */
3886 static bool
3887 setjmp_or_longjmp_p (const_tree fndecl)
3889 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3890 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3891 return true;
3893 tree declname = DECL_NAME (fndecl);
3894 if (!declname
3895 || (DECL_CONTEXT (fndecl) != NULL_TREE
3896 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3897 || !TREE_PUBLIC (fndecl))
3898 return false;
3900 const char *name = IDENTIFIER_POINTER (declname);
3901 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3904 /* Return true if FNDECL is an omp_* runtime API call. */
3906 static bool
3907 omp_runtime_api_call (const_tree fndecl)
3909 tree declname = DECL_NAME (fndecl);
3910 if (!declname
3911 || (DECL_CONTEXT (fndecl) != NULL_TREE
3912 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3913 || !TREE_PUBLIC (fndecl))
3914 return false;
3916 const char *name = IDENTIFIER_POINTER (declname);
3917 if (!startswith (name, "omp_"))
3918 return false;
3920 static const char *omp_runtime_apis[] =
3922 /* This array has 3 sections. First omp_* calls that don't
3923 have any suffixes. */
3924 "aligned_alloc",
3925 "aligned_calloc",
3926 "alloc",
3927 "calloc",
3928 "free",
3929 "realloc",
3930 "target_alloc",
3931 "target_associate_ptr",
3932 "target_disassociate_ptr",
3933 "target_free",
3934 "target_is_present",
3935 "target_memcpy",
3936 "target_memcpy_rect",
3937 NULL,
3938 /* Now omp_* calls that are available as omp_* and omp_*_. */
3939 "capture_affinity",
3940 "destroy_allocator",
3941 "destroy_lock",
3942 "destroy_nest_lock",
3943 "display_affinity",
3944 "fulfill_event",
3945 "get_active_level",
3946 "get_affinity_format",
3947 "get_cancellation",
3948 "get_default_allocator",
3949 "get_default_device",
3950 "get_device_num",
3951 "get_dynamic",
3952 "get_initial_device",
3953 "get_level",
3954 "get_max_active_levels",
3955 "get_max_task_priority",
3956 "get_max_threads",
3957 "get_nested",
3958 "get_num_devices",
3959 "get_num_places",
3960 "get_num_procs",
3961 "get_num_teams",
3962 "get_num_threads",
3963 "get_partition_num_places",
3964 "get_place_num",
3965 "get_proc_bind",
3966 "get_supported_active_levels",
3967 "get_team_num",
3968 "get_thread_limit",
3969 "get_thread_num",
3970 "get_wtick",
3971 "get_wtime",
3972 "in_final",
3973 "in_parallel",
3974 "init_lock",
3975 "init_nest_lock",
3976 "is_initial_device",
3977 "pause_resource",
3978 "pause_resource_all",
3979 "set_affinity_format",
3980 "set_default_allocator",
3981 "set_lock",
3982 "set_nest_lock",
3983 "test_lock",
3984 "test_nest_lock",
3985 "unset_lock",
3986 "unset_nest_lock",
3987 NULL,
3988 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3989 "display_env",
3990 "get_ancestor_thread_num",
3991 "init_allocator",
3992 "get_partition_place_nums",
3993 "get_place_num_procs",
3994 "get_place_proc_ids",
3995 "get_schedule",
3996 "get_team_size",
3997 "set_default_device",
3998 "set_dynamic",
3999 "set_max_active_levels",
4000 "set_nested",
4001 "set_num_threads",
4002 "set_schedule"
4005 int mode = 0;
4006 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
4008 if (omp_runtime_apis[i] == NULL)
4010 mode++;
4011 continue;
4013 size_t len = strlen (omp_runtime_apis[i]);
4014 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
4015 && (name[4 + len] == '\0'
4016 || (mode > 0
4017 && name[4 + len] == '_'
4018 && (name[4 + len + 1] == '\0'
4019 || (mode > 1
4020 && strcmp (name + 4 + len + 1, "8_") == 0)))))
4021 return true;
4023 return false;
4026 /* Helper function for scan_omp.
4028 Callback for walk_gimple_stmt used to scan for OMP directives in
4029 the current statement in GSI. */
4031 static tree
4032 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
4033 struct walk_stmt_info *wi)
4035 gimple *stmt = gsi_stmt (*gsi);
4036 omp_context *ctx = (omp_context *) wi->info;
4038 if (gimple_has_location (stmt))
4039 input_location = gimple_location (stmt);
4041 /* Check the nesting restrictions. */
4042 bool remove = false;
4043 if (is_gimple_omp (stmt))
4044 remove = !check_omp_nesting_restrictions (stmt, ctx);
4045 else if (is_gimple_call (stmt))
4047 tree fndecl = gimple_call_fndecl (stmt);
4048 if (fndecl)
4050 if (ctx
4051 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4052 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
4053 && setjmp_or_longjmp_p (fndecl)
4054 && !ctx->loop_p)
4056 remove = true;
4057 error_at (gimple_location (stmt),
4058 "setjmp/longjmp inside %<simd%> construct");
4060 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4061 switch (DECL_FUNCTION_CODE (fndecl))
4063 case BUILT_IN_GOMP_BARRIER:
4064 case BUILT_IN_GOMP_CANCEL:
4065 case BUILT_IN_GOMP_CANCELLATION_POINT:
4066 case BUILT_IN_GOMP_TASKYIELD:
4067 case BUILT_IN_GOMP_TASKWAIT:
4068 case BUILT_IN_GOMP_TASKGROUP_START:
4069 case BUILT_IN_GOMP_TASKGROUP_END:
4070 remove = !check_omp_nesting_restrictions (stmt, ctx);
4071 break;
4072 default:
4073 break;
4075 else if (ctx)
4077 omp_context *octx = ctx;
4078 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
4079 octx = ctx->outer;
4080 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
4082 remove = true;
4083 error_at (gimple_location (stmt),
4084 "OpenMP runtime API call %qD in a region with "
4085 "%<order(concurrent)%> clause", fndecl);
4087 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
4088 && (gimple_omp_target_kind (ctx->stmt)
4089 == GF_OMP_TARGET_KIND_REGION))
4091 tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
4092 tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
4093 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
4094 error_at (gimple_location (stmt),
4095 "OpenMP runtime API call %qD in a region with "
4096 "%<device(ancestor)%> clause", fndecl);
4101 if (remove)
4103 stmt = gimple_build_nop ();
4104 gsi_replace (gsi, stmt, false);
4107 *handled_ops_p = true;
4109 switch (gimple_code (stmt))
4111 case GIMPLE_OMP_PARALLEL:
4112 taskreg_nesting_level++;
4113 scan_omp_parallel (gsi, ctx);
4114 taskreg_nesting_level--;
4115 break;
4117 case GIMPLE_OMP_TASK:
4118 taskreg_nesting_level++;
4119 scan_omp_task (gsi, ctx);
4120 taskreg_nesting_level--;
4121 break;
4123 case GIMPLE_OMP_FOR:
4124 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4125 == GF_OMP_FOR_KIND_SIMD)
4126 && gimple_omp_for_combined_into_p (stmt)
4127 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
4129 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
4130 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
4131 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
4133 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
4134 break;
4137 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4138 == GF_OMP_FOR_KIND_SIMD)
4139 && omp_maybe_offloaded_ctx (ctx)
4140 && omp_max_simt_vf ()
4141 && gimple_omp_for_collapse (stmt) == 1)
4142 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
4143 else
4144 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
4145 break;
4147 case GIMPLE_OMP_SCOPE:
4148 ctx = new_omp_context (stmt, ctx);
4149 scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
4150 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4151 break;
4153 case GIMPLE_OMP_SECTIONS:
4154 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
4155 break;
4157 case GIMPLE_OMP_SINGLE:
4158 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
4159 break;
4161 case GIMPLE_OMP_SCAN:
4162 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
4164 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
4165 ctx->scan_inclusive = true;
4166 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
4167 ctx->scan_exclusive = true;
4169 /* FALLTHRU */
4170 case GIMPLE_OMP_SECTION:
4171 case GIMPLE_OMP_MASTER:
4172 case GIMPLE_OMP_ORDERED:
4173 case GIMPLE_OMP_CRITICAL:
4174 ctx = new_omp_context (stmt, ctx);
4175 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4176 break;
4178 case GIMPLE_OMP_MASKED:
4179 ctx = new_omp_context (stmt, ctx);
4180 scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
4181 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4182 break;
4184 case GIMPLE_OMP_TASKGROUP:
4185 ctx = new_omp_context (stmt, ctx);
4186 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
4187 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4188 break;
4190 case GIMPLE_OMP_TARGET:
4191 if (is_gimple_omp_offloaded (stmt))
4193 taskreg_nesting_level++;
4194 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4195 taskreg_nesting_level--;
4197 else
4198 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4199 break;
4201 case GIMPLE_OMP_TEAMS:
4202 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4204 taskreg_nesting_level++;
4205 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4206 taskreg_nesting_level--;
4208 else
4209 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4210 break;
4212 case GIMPLE_BIND:
4214 tree var;
4216 *handled_ops_p = false;
4217 if (ctx)
4218 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4219 var ;
4220 var = DECL_CHAIN (var))
4221 insert_decl_map (&ctx->cb, var, var);
4223 break;
4224 default:
4225 *handled_ops_p = false;
4226 break;
4229 return NULL_TREE;
4233 /* Scan all the statements starting at the current statement. CTX
4234 contains context information about the OMP directives and
4235 clauses found during the scan. */
4237 static void
4238 scan_omp (gimple_seq *body_p, omp_context *ctx)
4240 location_t saved_location;
4241 struct walk_stmt_info wi;
4243 memset (&wi, 0, sizeof (wi));
4244 wi.info = ctx;
4245 wi.want_locations = true;
4247 saved_location = input_location;
4248 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4249 input_location = saved_location;
4252 /* Re-gimplification and code generation routines. */
4254 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4255 of BIND if in a method. */
4257 static void
4258 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4260 if (DECL_ARGUMENTS (current_function_decl)
4261 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4262 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4263 == POINTER_TYPE))
4265 tree vars = gimple_bind_vars (bind);
4266 for (tree *pvar = &vars; *pvar; )
4267 if (omp_member_access_dummy_var (*pvar))
4268 *pvar = DECL_CHAIN (*pvar);
4269 else
4270 pvar = &DECL_CHAIN (*pvar);
4271 gimple_bind_set_vars (bind, vars);
4275 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4276 block and its subblocks. */
4278 static void
4279 remove_member_access_dummy_vars (tree block)
4281 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4282 if (omp_member_access_dummy_var (*pvar))
4283 *pvar = DECL_CHAIN (*pvar);
4284 else
4285 pvar = &DECL_CHAIN (*pvar);
4287 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4288 remove_member_access_dummy_vars (block);
4291 /* If a context was created for STMT when it was scanned, return it. */
4293 static omp_context *
4294 maybe_lookup_ctx (gimple *stmt)
4296 splay_tree_node n;
4297 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4298 return n ? (omp_context *) n->value : NULL;
4302 /* Find the mapping for DECL in CTX or the immediately enclosing
4303 context that has a mapping for DECL.
4305 If CTX is a nested parallel directive, we may have to use the decl
4306 mappings created in CTX's parent context. Suppose that we have the
4307 following parallel nesting (variable UIDs showed for clarity):
4309 iD.1562 = 0;
4310 #omp parallel shared(iD.1562) -> outer parallel
4311 iD.1562 = iD.1562 + 1;
4313 #omp parallel shared (iD.1562) -> inner parallel
4314 iD.1562 = iD.1562 - 1;
4316 Each parallel structure will create a distinct .omp_data_s structure
4317 for copying iD.1562 in/out of the directive:
4319 outer parallel .omp_data_s.1.i -> iD.1562
4320 inner parallel .omp_data_s.2.i -> iD.1562
4322 A shared variable mapping will produce a copy-out operation before
4323 the parallel directive and a copy-in operation after it. So, in
4324 this case we would have:
4326 iD.1562 = 0;
4327 .omp_data_o.1.i = iD.1562;
4328 #omp parallel shared(iD.1562) -> outer parallel
4329 .omp_data_i.1 = &.omp_data_o.1
4330 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4332 .omp_data_o.2.i = iD.1562; -> **
4333 #omp parallel shared(iD.1562) -> inner parallel
4334 .omp_data_i.2 = &.omp_data_o.2
4335 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4338 ** This is a problem. The symbol iD.1562 cannot be referenced
4339 inside the body of the outer parallel region. But since we are
4340 emitting this copy operation while expanding the inner parallel
4341 directive, we need to access the CTX structure of the outer
4342 parallel directive to get the correct mapping:
4344 .omp_data_o.2.i = .omp_data_i.1->i
4346 Since there may be other workshare or parallel directives enclosing
4347 the parallel directive, it may be necessary to walk up the context
4348 parent chain. This is not a problem in general because nested
4349 parallelism happens only rarely. */
4351 static tree
4352 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4354 tree t;
4355 omp_context *up;
4357 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4358 t = maybe_lookup_decl (decl, up);
4360 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4362 return t ? t : decl;
4366 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4367 in outer contexts. */
4369 static tree
4370 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4372 tree t = NULL;
4373 omp_context *up;
4375 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4376 t = maybe_lookup_decl (decl, up);
4378 return t ? t : decl;
4382 /* Construct the initialization value for reduction operation OP. */
4384 tree
4385 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4387 switch (op)
4389 case PLUS_EXPR:
4390 case MINUS_EXPR:
4391 case BIT_IOR_EXPR:
4392 case BIT_XOR_EXPR:
4393 case TRUTH_OR_EXPR:
4394 case TRUTH_ORIF_EXPR:
4395 case TRUTH_XOR_EXPR:
4396 case NE_EXPR:
4397 return build_zero_cst (type);
4399 case MULT_EXPR:
4400 case TRUTH_AND_EXPR:
4401 case TRUTH_ANDIF_EXPR:
4402 case EQ_EXPR:
4403 return fold_convert_loc (loc, type, integer_one_node);
4405 case BIT_AND_EXPR:
4406 return fold_convert_loc (loc, type, integer_minus_one_node);
4408 case MAX_EXPR:
4409 if (SCALAR_FLOAT_TYPE_P (type))
4411 REAL_VALUE_TYPE max, min;
4412 if (HONOR_INFINITIES (type))
4414 real_inf (&max);
4415 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4417 else
4418 real_maxval (&min, 1, TYPE_MODE (type));
4419 return build_real (type, min);
4421 else if (POINTER_TYPE_P (type))
4423 wide_int min
4424 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4425 return wide_int_to_tree (type, min);
4427 else
4429 gcc_assert (INTEGRAL_TYPE_P (type));
4430 return TYPE_MIN_VALUE (type);
4433 case MIN_EXPR:
4434 if (SCALAR_FLOAT_TYPE_P (type))
4436 REAL_VALUE_TYPE max;
4437 if (HONOR_INFINITIES (type))
4438 real_inf (&max);
4439 else
4440 real_maxval (&max, 0, TYPE_MODE (type));
4441 return build_real (type, max);
4443 else if (POINTER_TYPE_P (type))
4445 wide_int max
4446 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4447 return wide_int_to_tree (type, max);
4449 else
4451 gcc_assert (INTEGRAL_TYPE_P (type));
4452 return TYPE_MAX_VALUE (type);
4455 default:
4456 gcc_unreachable ();
4460 /* Construct the initialization value for reduction CLAUSE. */
4462 tree
4463 omp_reduction_init (tree clause, tree type)
4465 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4466 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4469 /* Return alignment to be assumed for var in CLAUSE, which should be
4470 OMP_CLAUSE_ALIGNED. */
4472 static tree
4473 omp_clause_aligned_alignment (tree clause)
4475 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4476 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4478 /* Otherwise return implementation defined alignment. */
4479 unsigned int al = 1;
4480 opt_scalar_mode mode_iter;
4481 auto_vector_modes modes;
4482 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4483 static enum mode_class classes[]
4484 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4485 for (int i = 0; i < 4; i += 2)
4486 /* The for loop above dictates that we only walk through scalar classes. */
4487 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4489 scalar_mode mode = mode_iter.require ();
4490 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4491 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4492 continue;
4493 machine_mode alt_vmode;
4494 for (unsigned int j = 0; j < modes.length (); ++j)
4495 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4496 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4497 vmode = alt_vmode;
4499 tree type = lang_hooks.types.type_for_mode (mode, 1);
4500 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4501 continue;
4502 type = build_vector_type_for_mode (type, vmode);
4503 if (TYPE_MODE (type) != vmode)
4504 continue;
4505 if (TYPE_ALIGN_UNIT (type) > al)
4506 al = TYPE_ALIGN_UNIT (type);
4508 return build_int_cst (integer_type_node, al);
4512 /* This structure is part of the interface between lower_rec_simd_input_clauses
4513 and lower_rec_input_clauses. */
4515 class omplow_simd_context {
4516 public:
4517 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4518 tree idx;
4519 tree lane;
4520 tree lastlane;
4521 vec<tree, va_heap> simt_eargs;
4522 gimple_seq simt_dlist;
4523 poly_uint64_pod max_vf;
4524 bool is_simt;
4527 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4528 privatization. */
4530 static bool
4531 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4532 omplow_simd_context *sctx, tree &ivar,
4533 tree &lvar, tree *rvar = NULL,
4534 tree *rvar2 = NULL)
4536 if (known_eq (sctx->max_vf, 0U))
4538 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4539 if (maybe_gt (sctx->max_vf, 1U))
4541 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4542 OMP_CLAUSE_SAFELEN);
4543 if (c)
4545 poly_uint64 safe_len;
4546 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4547 || maybe_lt (safe_len, 1U))
4548 sctx->max_vf = 1;
4549 else
4550 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4553 if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
4555 for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
4556 c = OMP_CLAUSE_CHAIN (c))
4558 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4559 continue;
4561 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4563 /* UDR reductions are not supported yet for SIMT, disable
4564 SIMT. */
4565 sctx->max_vf = 1;
4566 break;
4569 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
4570 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
4572 /* Doing boolean operations on non-integral types is
4573 for conformance only, it's not worth supporting this
4574 for SIMT. */
4575 sctx->max_vf = 1;
4576 break;
4580 if (maybe_gt (sctx->max_vf, 1U))
4582 sctx->idx = create_tmp_var (unsigned_type_node);
4583 sctx->lane = create_tmp_var (unsigned_type_node);
4586 if (known_eq (sctx->max_vf, 1U))
4587 return false;
4589 if (sctx->is_simt)
4591 if (is_gimple_reg (new_var))
4593 ivar = lvar = new_var;
4594 return true;
4596 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4597 ivar = lvar = create_tmp_var (type);
4598 TREE_ADDRESSABLE (ivar) = 1;
4599 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4600 NULL, DECL_ATTRIBUTES (ivar));
4601 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4602 tree clobber = build_clobber (type);
4603 gimple *g = gimple_build_assign (ivar, clobber);
4604 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4606 else
4608 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4609 tree avar = create_tmp_var_raw (atype);
4610 if (TREE_ADDRESSABLE (new_var))
4611 TREE_ADDRESSABLE (avar) = 1;
4612 DECL_ATTRIBUTES (avar)
4613 = tree_cons (get_identifier ("omp simd array"), NULL,
4614 DECL_ATTRIBUTES (avar));
4615 gimple_add_tmp_var (avar);
4616 tree iavar = avar;
4617 if (rvar && !ctx->for_simd_scan_phase)
4619 /* For inscan reductions, create another array temporary,
4620 which will hold the reduced value. */
4621 iavar = create_tmp_var_raw (atype);
4622 if (TREE_ADDRESSABLE (new_var))
4623 TREE_ADDRESSABLE (iavar) = 1;
4624 DECL_ATTRIBUTES (iavar)
4625 = tree_cons (get_identifier ("omp simd array"), NULL,
4626 tree_cons (get_identifier ("omp simd inscan"), NULL,
4627 DECL_ATTRIBUTES (iavar)));
4628 gimple_add_tmp_var (iavar);
4629 ctx->cb.decl_map->put (avar, iavar);
4630 if (sctx->lastlane == NULL_TREE)
4631 sctx->lastlane = create_tmp_var (unsigned_type_node);
4632 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4633 sctx->lastlane, NULL_TREE, NULL_TREE);
4634 TREE_THIS_NOTRAP (*rvar) = 1;
4636 if (ctx->scan_exclusive)
4638 /* And for exclusive scan yet another one, which will
4639 hold the value during the scan phase. */
4640 tree savar = create_tmp_var_raw (atype);
4641 if (TREE_ADDRESSABLE (new_var))
4642 TREE_ADDRESSABLE (savar) = 1;
4643 DECL_ATTRIBUTES (savar)
4644 = tree_cons (get_identifier ("omp simd array"), NULL,
4645 tree_cons (get_identifier ("omp simd inscan "
4646 "exclusive"), NULL,
4647 DECL_ATTRIBUTES (savar)));
4648 gimple_add_tmp_var (savar);
4649 ctx->cb.decl_map->put (iavar, savar);
4650 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4651 sctx->idx, NULL_TREE, NULL_TREE);
4652 TREE_THIS_NOTRAP (*rvar2) = 1;
4655 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4656 NULL_TREE, NULL_TREE);
4657 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4658 NULL_TREE, NULL_TREE);
4659 TREE_THIS_NOTRAP (ivar) = 1;
4660 TREE_THIS_NOTRAP (lvar) = 1;
4662 if (DECL_P (new_var))
4664 SET_DECL_VALUE_EXPR (new_var, lvar);
4665 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4667 return true;
4670 /* Helper function of lower_rec_input_clauses. For a reference
4671 in simd reduction, add an underlying variable it will reference. */
4673 static void
4674 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4676 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4677 if (TREE_CONSTANT (z))
4679 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4680 get_name (new_vard));
4681 gimple_add_tmp_var (z);
4682 TREE_ADDRESSABLE (z) = 1;
4683 z = build_fold_addr_expr_loc (loc, z);
4684 gimplify_assign (new_vard, z, ilist);
4688 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4689 code to emit (type) (tskred_temp[idx]). */
4691 static tree
4692 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4693 unsigned idx)
4695 unsigned HOST_WIDE_INT sz
4696 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4697 tree r = build2 (MEM_REF, pointer_sized_int_node,
4698 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4699 idx * sz));
4700 tree v = create_tmp_var (pointer_sized_int_node);
4701 gimple *g = gimple_build_assign (v, r);
4702 gimple_seq_add_stmt (ilist, g);
4703 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4705 v = create_tmp_var (type);
4706 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4707 gimple_seq_add_stmt (ilist, g);
4709 return v;
4712 /* Lower early initialization of privatized variable NEW_VAR
4713 if it needs an allocator (has allocate clause). */
4715 static bool
4716 lower_private_allocate (tree var, tree new_var, tree &allocator,
4717 tree &allocate_ptr, gimple_seq *ilist,
4718 omp_context *ctx, bool is_ref, tree size)
4720 if (allocator)
4721 return false;
4722 gcc_assert (allocate_ptr == NULL_TREE);
4723 if (ctx->allocate_map
4724 && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
4725 if (tree *allocatorp = ctx->allocate_map->get (var))
4726 allocator = *allocatorp;
4727 if (allocator == NULL_TREE)
4728 return false;
4729 if (!is_ref && omp_privatize_by_reference (var))
4731 allocator = NULL_TREE;
4732 return false;
4735 unsigned HOST_WIDE_INT ialign = 0;
4736 if (TREE_CODE (allocator) == TREE_LIST)
4738 ialign = tree_to_uhwi (TREE_VALUE (allocator));
4739 allocator = TREE_PURPOSE (allocator);
4741 if (TREE_CODE (allocator) != INTEGER_CST)
4742 allocator = build_outer_var_ref (allocator, ctx);
4743 allocator = fold_convert (pointer_sized_int_node, allocator);
4744 if (TREE_CODE (allocator) != INTEGER_CST)
4746 tree var = create_tmp_var (TREE_TYPE (allocator));
4747 gimplify_assign (var, allocator, ilist);
4748 allocator = var;
4751 tree ptr_type, align, sz = size;
4752 if (TYPE_P (new_var))
4754 ptr_type = build_pointer_type (new_var);
4755 ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
4757 else if (is_ref)
4759 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4760 ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4762 else
4764 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4765 ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
4766 if (sz == NULL_TREE)
4767 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
4769 align = build_int_cst (size_type_node, ialign);
4770 if (TREE_CODE (sz) != INTEGER_CST)
4772 tree szvar = create_tmp_var (size_type_node);
4773 gimplify_assign (szvar, sz, ilist);
4774 sz = szvar;
4776 allocate_ptr = create_tmp_var (ptr_type);
4777 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4778 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4779 gimple_call_set_lhs (g, allocate_ptr);
4780 gimple_seq_add_stmt (ilist, g);
4781 if (!is_ref)
4783 tree x = build_simple_mem_ref (allocate_ptr);
4784 TREE_THIS_NOTRAP (x) = 1;
4785 SET_DECL_VALUE_EXPR (new_var, x);
4786 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4788 return true;
4791 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4792 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4793 private variables. Initialization statements go in ILIST, while calls
4794 to destructors go in DLIST. */
4796 static void
4797 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4798 omp_context *ctx, struct omp_for_data *fd)
4800 tree c, copyin_seq, x, ptr;
4801 bool copyin_by_ref = false;
4802 bool lastprivate_firstprivate = false;
4803 bool reduction_omp_orig_ref = false;
4804 int pass;
4805 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4806 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4807 omplow_simd_context sctx = omplow_simd_context ();
4808 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4809 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4810 gimple_seq llist[4] = { };
4811 tree nonconst_simd_if = NULL_TREE;
4813 copyin_seq = NULL;
4814 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4816 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4817 with data sharing clauses referencing variable sized vars. That
4818 is unnecessarily hard to support and very unlikely to result in
4819 vectorized code anyway. */
4820 if (is_simd)
4821 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4822 switch (OMP_CLAUSE_CODE (c))
4824 case OMP_CLAUSE_LINEAR:
4825 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4826 sctx.max_vf = 1;
4827 /* FALLTHRU */
4828 case OMP_CLAUSE_PRIVATE:
4829 case OMP_CLAUSE_FIRSTPRIVATE:
4830 case OMP_CLAUSE_LASTPRIVATE:
4831 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4832 sctx.max_vf = 1;
4833 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4835 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4836 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4837 sctx.max_vf = 1;
4839 break;
4840 case OMP_CLAUSE_REDUCTION:
4841 case OMP_CLAUSE_IN_REDUCTION:
4842 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4843 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4844 sctx.max_vf = 1;
4845 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4847 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4848 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4849 sctx.max_vf = 1;
4851 break;
4852 case OMP_CLAUSE_IF:
4853 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4854 sctx.max_vf = 1;
4855 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4856 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4857 break;
4858 case OMP_CLAUSE_SIMDLEN:
4859 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4860 sctx.max_vf = 1;
4861 break;
4862 case OMP_CLAUSE__CONDTEMP_:
4863 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4864 if (sctx.is_simt)
4865 sctx.max_vf = 1;
4866 break;
4867 default:
4868 continue;
4871 /* Add a placeholder for simduid. */
4872 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4873 sctx.simt_eargs.safe_push (NULL_TREE);
4875 unsigned task_reduction_cnt = 0;
4876 unsigned task_reduction_cntorig = 0;
4877 unsigned task_reduction_cnt_full = 0;
4878 unsigned task_reduction_cntorig_full = 0;
4879 unsigned task_reduction_other_cnt = 0;
4880 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4881 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4882 /* Do all the fixed sized types in the first pass, and the variable sized
4883 types in the second pass. This makes sure that the scalar arguments to
4884 the variable sized types are processed before we use them in the
4885 variable sized operations. For task reductions we use 4 passes, in the
4886 first two we ignore them, in the third one gather arguments for
4887 GOMP_task_reduction_remap call and in the last pass actually handle
4888 the task reductions. */
4889 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4890 ? 4 : 2); ++pass)
4892 if (pass == 2 && task_reduction_cnt)
4894 tskred_atype
4895 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4896 + task_reduction_cntorig);
4897 tskred_avar = create_tmp_var_raw (tskred_atype);
4898 gimple_add_tmp_var (tskred_avar);
4899 TREE_ADDRESSABLE (tskred_avar) = 1;
4900 task_reduction_cnt_full = task_reduction_cnt;
4901 task_reduction_cntorig_full = task_reduction_cntorig;
4903 else if (pass == 3 && task_reduction_cnt)
4905 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4906 gimple *g
4907 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4908 size_int (task_reduction_cntorig),
4909 build_fold_addr_expr (tskred_avar));
4910 gimple_seq_add_stmt (ilist, g);
4912 if (pass == 3 && task_reduction_other_cnt)
4914 /* For reduction clauses, build
4915 tskred_base = (void *) tskred_temp[2]
4916 + omp_get_thread_num () * tskred_temp[1]
4917 or if tskred_temp[1] is known to be constant, that constant
4918 directly. This is the start of the private reduction copy block
4919 for the current thread. */
4920 tree v = create_tmp_var (integer_type_node);
4921 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4922 gimple *g = gimple_build_call (x, 0);
4923 gimple_call_set_lhs (g, v);
4924 gimple_seq_add_stmt (ilist, g);
4925 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4926 tskred_temp = OMP_CLAUSE_DECL (c);
4927 if (is_taskreg_ctx (ctx))
4928 tskred_temp = lookup_decl (tskred_temp, ctx);
4929 tree v2 = create_tmp_var (sizetype);
4930 g = gimple_build_assign (v2, NOP_EXPR, v);
4931 gimple_seq_add_stmt (ilist, g);
4932 if (ctx->task_reductions[0])
4933 v = fold_convert (sizetype, ctx->task_reductions[0]);
4934 else
4935 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4936 tree v3 = create_tmp_var (sizetype);
4937 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4938 gimple_seq_add_stmt (ilist, g);
4939 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4940 tskred_base = create_tmp_var (ptr_type_node);
4941 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4942 gimple_seq_add_stmt (ilist, g);
4944 task_reduction_cnt = 0;
4945 task_reduction_cntorig = 0;
4946 task_reduction_other_cnt = 0;
4947 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4949 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4950 tree var, new_var;
4951 bool by_ref;
4952 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4953 bool task_reduction_p = false;
4954 bool task_reduction_needs_orig_p = false;
4955 tree cond = NULL_TREE;
4956 tree allocator, allocate_ptr;
4958 switch (c_kind)
4960 case OMP_CLAUSE_PRIVATE:
4961 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4962 continue;
4963 break;
4964 case OMP_CLAUSE_SHARED:
4965 /* Ignore shared directives in teams construct inside
4966 of target construct. */
4967 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4968 && !is_host_teams_ctx (ctx))
4969 continue;
4970 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4972 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4973 || is_global_var (OMP_CLAUSE_DECL (c)));
4974 continue;
4976 case OMP_CLAUSE_FIRSTPRIVATE:
4977 case OMP_CLAUSE_COPYIN:
4978 break;
4979 case OMP_CLAUSE_LINEAR:
4980 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4981 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4982 lastprivate_firstprivate = true;
4983 break;
4984 case OMP_CLAUSE_REDUCTION:
4985 case OMP_CLAUSE_IN_REDUCTION:
4986 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
4987 || is_task_ctx (ctx)
4988 || OMP_CLAUSE_REDUCTION_TASK (c))
4990 task_reduction_p = true;
4991 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4993 task_reduction_other_cnt++;
4994 if (pass == 2)
4995 continue;
4997 else
4998 task_reduction_cnt++;
4999 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5001 var = OMP_CLAUSE_DECL (c);
5002 /* If var is a global variable that isn't privatized
5003 in outer contexts, we don't need to look up the
5004 original address, it is always the address of the
5005 global variable itself. */
5006 if (!DECL_P (var)
5007 || omp_privatize_by_reference (var)
5008 || !is_global_var
5009 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5011 task_reduction_needs_orig_p = true;
5012 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5013 task_reduction_cntorig++;
5017 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5018 reduction_omp_orig_ref = true;
5019 break;
5020 case OMP_CLAUSE__REDUCTEMP_:
5021 if (!is_taskreg_ctx (ctx))
5022 continue;
5023 /* FALLTHRU */
5024 case OMP_CLAUSE__LOOPTEMP_:
5025 /* Handle _looptemp_/_reductemp_ clauses only on
5026 parallel/task. */
5027 if (fd)
5028 continue;
5029 break;
5030 case OMP_CLAUSE_LASTPRIVATE:
5031 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5033 lastprivate_firstprivate = true;
5034 if (pass != 0 || is_taskloop_ctx (ctx))
5035 continue;
5037 /* Even without corresponding firstprivate, if
5038 decl is Fortran allocatable, it needs outer var
5039 reference. */
5040 else if (pass == 0
5041 && lang_hooks.decls.omp_private_outer_ref
5042 (OMP_CLAUSE_DECL (c)))
5043 lastprivate_firstprivate = true;
5044 break;
5045 case OMP_CLAUSE_ALIGNED:
5046 if (pass != 1)
5047 continue;
5048 var = OMP_CLAUSE_DECL (c);
5049 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5050 && !is_global_var (var))
5052 new_var = maybe_lookup_decl (var, ctx);
5053 if (new_var == NULL_TREE)
5054 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
5055 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5056 tree alarg = omp_clause_aligned_alignment (c);
5057 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5058 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5059 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5060 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5061 gimplify_and_add (x, ilist);
5063 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5064 && is_global_var (var))
5066 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5067 new_var = lookup_decl (var, ctx);
5068 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
5069 t = build_fold_addr_expr_loc (clause_loc, t);
5070 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5071 tree alarg = omp_clause_aligned_alignment (c);
5072 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5073 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5074 t = fold_convert_loc (clause_loc, ptype, t);
5075 x = create_tmp_var (ptype);
5076 t = build2 (MODIFY_EXPR, ptype, x, t);
5077 gimplify_and_add (t, ilist);
5078 t = build_simple_mem_ref_loc (clause_loc, x);
5079 SET_DECL_VALUE_EXPR (new_var, t);
5080 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5082 continue;
5083 case OMP_CLAUSE__CONDTEMP_:
5084 if (is_parallel_ctx (ctx)
5085 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5086 break;
5087 continue;
5088 default:
5089 continue;
5092 if (task_reduction_p != (pass >= 2))
5093 continue;
5095 allocator = NULL_TREE;
5096 allocate_ptr = NULL_TREE;
5097 new_var = var = OMP_CLAUSE_DECL (c);
5098 if ((c_kind == OMP_CLAUSE_REDUCTION
5099 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5100 && TREE_CODE (var) == MEM_REF)
5102 var = TREE_OPERAND (var, 0);
5103 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5104 var = TREE_OPERAND (var, 0);
5105 if (TREE_CODE (var) == INDIRECT_REF
5106 || TREE_CODE (var) == ADDR_EXPR)
5107 var = TREE_OPERAND (var, 0);
5108 if (is_variable_sized (var))
5110 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5111 var = DECL_VALUE_EXPR (var);
5112 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5113 var = TREE_OPERAND (var, 0);
5114 gcc_assert (DECL_P (var));
5116 new_var = var;
5118 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5120 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5121 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5123 else if (c_kind != OMP_CLAUSE_COPYIN)
5124 new_var = lookup_decl (var, ctx);
5126 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5128 if (pass != 0)
5129 continue;
5131 /* C/C++ array section reductions. */
5132 else if ((c_kind == OMP_CLAUSE_REDUCTION
5133 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5134 && var != OMP_CLAUSE_DECL (c))
5136 if (pass == 0)
5137 continue;
5139 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5140 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5142 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5144 tree b = TREE_OPERAND (orig_var, 1);
5145 if (is_omp_target (ctx->stmt))
5146 b = NULL_TREE;
5147 else
5148 b = maybe_lookup_decl (b, ctx);
5149 if (b == NULL)
5151 b = TREE_OPERAND (orig_var, 1);
5152 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5154 if (integer_zerop (bias))
5155 bias = b;
5156 else
5158 bias = fold_convert_loc (clause_loc,
5159 TREE_TYPE (b), bias);
5160 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5161 TREE_TYPE (b), b, bias);
5163 orig_var = TREE_OPERAND (orig_var, 0);
5165 if (pass == 2)
5167 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5168 if (is_global_var (out)
5169 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5170 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5171 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5172 != POINTER_TYPE)))
5173 x = var;
5174 else if (is_omp_target (ctx->stmt))
5175 x = out;
5176 else
5178 bool by_ref = use_pointer_for_field (var, NULL);
5179 x = build_receiver_ref (var, by_ref, ctx);
5180 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5181 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5182 == POINTER_TYPE))
5183 x = build_fold_addr_expr (x);
5185 if (TREE_CODE (orig_var) == INDIRECT_REF)
5186 x = build_simple_mem_ref (x);
5187 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5189 if (var == TREE_OPERAND (orig_var, 0))
5190 x = build_fold_addr_expr (x);
5192 bias = fold_convert (sizetype, bias);
5193 x = fold_convert (ptr_type_node, x);
5194 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5195 TREE_TYPE (x), x, bias);
5196 unsigned cnt = task_reduction_cnt - 1;
5197 if (!task_reduction_needs_orig_p)
5198 cnt += (task_reduction_cntorig_full
5199 - task_reduction_cntorig);
5200 else
5201 cnt = task_reduction_cntorig - 1;
5202 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5203 size_int (cnt), NULL_TREE, NULL_TREE);
5204 gimplify_assign (r, x, ilist);
5205 continue;
5208 if (TREE_CODE (orig_var) == INDIRECT_REF
5209 || TREE_CODE (orig_var) == ADDR_EXPR)
5210 orig_var = TREE_OPERAND (orig_var, 0);
5211 tree d = OMP_CLAUSE_DECL (c);
5212 tree type = TREE_TYPE (d);
5213 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5214 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5215 tree sz = v;
5216 const char *name = get_name (orig_var);
5217 if (pass != 3 && !TREE_CONSTANT (v))
5219 tree t;
5220 if (is_omp_target (ctx->stmt))
5221 t = NULL_TREE;
5222 else
5223 t = maybe_lookup_decl (v, ctx);
5224 if (t)
5225 v = t;
5226 else
5227 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5228 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5229 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5230 TREE_TYPE (v), v,
5231 build_int_cst (TREE_TYPE (v), 1));
5232 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5233 TREE_TYPE (v), t,
5234 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5236 if (pass == 3)
5238 tree xv = create_tmp_var (ptr_type_node);
5239 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5241 unsigned cnt = task_reduction_cnt - 1;
5242 if (!task_reduction_needs_orig_p)
5243 cnt += (task_reduction_cntorig_full
5244 - task_reduction_cntorig);
5245 else
5246 cnt = task_reduction_cntorig - 1;
5247 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5248 size_int (cnt), NULL_TREE, NULL_TREE);
5250 gimple *g = gimple_build_assign (xv, x);
5251 gimple_seq_add_stmt (ilist, g);
5253 else
5255 unsigned int idx = *ctx->task_reduction_map->get (c);
5256 tree off;
5257 if (ctx->task_reductions[1 + idx])
5258 off = fold_convert (sizetype,
5259 ctx->task_reductions[1 + idx]);
5260 else
5261 off = task_reduction_read (ilist, tskred_temp, sizetype,
5262 7 + 3 * idx + 1);
5263 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5264 tskred_base, off);
5265 gimple_seq_add_stmt (ilist, g);
5267 x = fold_convert (build_pointer_type (boolean_type_node),
5268 xv);
5269 if (TREE_CONSTANT (v))
5270 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5271 TYPE_SIZE_UNIT (type));
5272 else
5274 tree t;
5275 if (is_omp_target (ctx->stmt))
5276 t = NULL_TREE;
5277 else
5278 t = maybe_lookup_decl (v, ctx);
5279 if (t)
5280 v = t;
5281 else
5282 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5283 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5284 fb_rvalue);
5285 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5286 TREE_TYPE (v), v,
5287 build_int_cst (TREE_TYPE (v), 1));
5288 t = fold_build2_loc (clause_loc, MULT_EXPR,
5289 TREE_TYPE (v), t,
5290 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5291 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5293 cond = create_tmp_var (TREE_TYPE (x));
5294 gimplify_assign (cond, x, ilist);
5295 x = xv;
5297 else if (lower_private_allocate (var, type, allocator,
5298 allocate_ptr, ilist, ctx,
5299 true,
5300 TREE_CONSTANT (v)
5301 ? TYPE_SIZE_UNIT (type)
5302 : sz))
5303 x = allocate_ptr;
5304 else if (TREE_CONSTANT (v))
5306 x = create_tmp_var_raw (type, name);
5307 gimple_add_tmp_var (x);
5308 TREE_ADDRESSABLE (x) = 1;
5309 x = build_fold_addr_expr_loc (clause_loc, x);
5311 else
5313 tree atmp
5314 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5315 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5316 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5319 tree ptype = build_pointer_type (TREE_TYPE (type));
5320 x = fold_convert_loc (clause_loc, ptype, x);
5321 tree y = create_tmp_var (ptype, name);
5322 gimplify_assign (y, x, ilist);
5323 x = y;
5324 tree yb = y;
5326 if (!integer_zerop (bias))
5328 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5329 bias);
5330 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5332 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5333 pointer_sized_int_node, yb, bias);
5334 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5335 yb = create_tmp_var (ptype, name);
5336 gimplify_assign (yb, x, ilist);
5337 x = yb;
5340 d = TREE_OPERAND (d, 0);
5341 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5342 d = TREE_OPERAND (d, 0);
5343 if (TREE_CODE (d) == ADDR_EXPR)
5345 if (orig_var != var)
5347 gcc_assert (is_variable_sized (orig_var));
5348 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5350 gimplify_assign (new_var, x, ilist);
5351 tree new_orig_var = lookup_decl (orig_var, ctx);
5352 tree t = build_fold_indirect_ref (new_var);
5353 DECL_IGNORED_P (new_var) = 0;
5354 TREE_THIS_NOTRAP (t) = 1;
5355 SET_DECL_VALUE_EXPR (new_orig_var, t);
5356 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5358 else
5360 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5361 build_int_cst (ptype, 0));
5362 SET_DECL_VALUE_EXPR (new_var, x);
5363 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5366 else
5368 gcc_assert (orig_var == var);
5369 if (TREE_CODE (d) == INDIRECT_REF)
5371 x = create_tmp_var (ptype, name);
5372 TREE_ADDRESSABLE (x) = 1;
5373 gimplify_assign (x, yb, ilist);
5374 x = build_fold_addr_expr_loc (clause_loc, x);
5376 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5377 gimplify_assign (new_var, x, ilist);
5379 /* GOMP_taskgroup_reduction_register memsets the whole
5380 array to zero. If the initializer is zero, we don't
5381 need to initialize it again, just mark it as ever
5382 used unconditionally, i.e. cond = true. */
5383 if (cond
5384 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5385 && initializer_zerop (omp_reduction_init (c,
5386 TREE_TYPE (type))))
5388 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5389 boolean_true_node);
5390 gimple_seq_add_stmt (ilist, g);
5391 continue;
5393 tree end = create_artificial_label (UNKNOWN_LOCATION);
5394 if (cond)
5396 gimple *g;
5397 if (!is_parallel_ctx (ctx))
5399 tree condv = create_tmp_var (boolean_type_node);
5400 g = gimple_build_assign (condv,
5401 build_simple_mem_ref (cond));
5402 gimple_seq_add_stmt (ilist, g);
5403 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5404 g = gimple_build_cond (NE_EXPR, condv,
5405 boolean_false_node, end, lab1);
5406 gimple_seq_add_stmt (ilist, g);
5407 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5409 g = gimple_build_assign (build_simple_mem_ref (cond),
5410 boolean_true_node);
5411 gimple_seq_add_stmt (ilist, g);
5414 tree y1 = create_tmp_var (ptype);
5415 gimplify_assign (y1, y, ilist);
5416 tree i2 = NULL_TREE, y2 = NULL_TREE;
5417 tree body2 = NULL_TREE, end2 = NULL_TREE;
5418 tree y3 = NULL_TREE, y4 = NULL_TREE;
5419 if (task_reduction_needs_orig_p)
5421 y3 = create_tmp_var (ptype);
5422 tree ref;
5423 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5424 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5425 size_int (task_reduction_cnt_full
5426 + task_reduction_cntorig - 1),
5427 NULL_TREE, NULL_TREE);
5428 else
5430 unsigned int idx = *ctx->task_reduction_map->get (c);
5431 ref = task_reduction_read (ilist, tskred_temp, ptype,
5432 7 + 3 * idx);
5434 gimplify_assign (y3, ref, ilist);
5436 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5438 if (pass != 3)
5440 y2 = create_tmp_var (ptype);
5441 gimplify_assign (y2, y, ilist);
5443 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5445 tree ref = build_outer_var_ref (var, ctx);
5446 /* For ref build_outer_var_ref already performs this. */
5447 if (TREE_CODE (d) == INDIRECT_REF)
5448 gcc_assert (omp_privatize_by_reference (var));
5449 else if (TREE_CODE (d) == ADDR_EXPR)
5450 ref = build_fold_addr_expr (ref);
5451 else if (omp_privatize_by_reference (var))
5452 ref = build_fold_addr_expr (ref);
5453 ref = fold_convert_loc (clause_loc, ptype, ref);
5454 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5455 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5457 y3 = create_tmp_var (ptype);
5458 gimplify_assign (y3, unshare_expr (ref), ilist);
5460 if (is_simd)
5462 y4 = create_tmp_var (ptype);
5463 gimplify_assign (y4, ref, dlist);
5467 tree i = create_tmp_var (TREE_TYPE (v));
5468 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5469 tree body = create_artificial_label (UNKNOWN_LOCATION);
5470 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5471 if (y2)
5473 i2 = create_tmp_var (TREE_TYPE (v));
5474 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5475 body2 = create_artificial_label (UNKNOWN_LOCATION);
5476 end2 = create_artificial_label (UNKNOWN_LOCATION);
5477 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5479 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5481 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5482 tree decl_placeholder
5483 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5484 SET_DECL_VALUE_EXPR (decl_placeholder,
5485 build_simple_mem_ref (y1));
5486 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5487 SET_DECL_VALUE_EXPR (placeholder,
5488 y3 ? build_simple_mem_ref (y3)
5489 : error_mark_node);
5490 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5491 x = lang_hooks.decls.omp_clause_default_ctor
5492 (c, build_simple_mem_ref (y1),
5493 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5494 if (x)
5495 gimplify_and_add (x, ilist);
5496 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5498 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5499 lower_omp (&tseq, ctx);
5500 gimple_seq_add_seq (ilist, tseq);
5502 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5503 if (is_simd)
5505 SET_DECL_VALUE_EXPR (decl_placeholder,
5506 build_simple_mem_ref (y2));
5507 SET_DECL_VALUE_EXPR (placeholder,
5508 build_simple_mem_ref (y4));
5509 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5510 lower_omp (&tseq, ctx);
5511 gimple_seq_add_seq (dlist, tseq);
5512 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5514 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5515 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5516 if (y2)
5518 x = lang_hooks.decls.omp_clause_dtor
5519 (c, build_simple_mem_ref (y2));
5520 if (x)
5521 gimplify_and_add (x, dlist);
5524 else
5526 x = omp_reduction_init (c, TREE_TYPE (type));
5527 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5529 /* reduction(-:var) sums up the partial results, so it
5530 acts identically to reduction(+:var). */
5531 if (code == MINUS_EXPR)
5532 code = PLUS_EXPR;
5534 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5535 if (is_simd)
5537 x = build2 (code, TREE_TYPE (type),
5538 build_simple_mem_ref (y4),
5539 build_simple_mem_ref (y2));
5540 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5543 gimple *g
5544 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5545 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5546 gimple_seq_add_stmt (ilist, g);
5547 if (y3)
5549 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5550 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5551 gimple_seq_add_stmt (ilist, g);
5553 g = gimple_build_assign (i, PLUS_EXPR, i,
5554 build_int_cst (TREE_TYPE (i), 1));
5555 gimple_seq_add_stmt (ilist, g);
5556 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5557 gimple_seq_add_stmt (ilist, g);
5558 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5559 if (y2)
5561 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5562 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5563 gimple_seq_add_stmt (dlist, g);
5564 if (y4)
5566 g = gimple_build_assign
5567 (y4, POINTER_PLUS_EXPR, y4,
5568 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5569 gimple_seq_add_stmt (dlist, g);
5571 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5572 build_int_cst (TREE_TYPE (i2), 1));
5573 gimple_seq_add_stmt (dlist, g);
5574 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5575 gimple_seq_add_stmt (dlist, g);
5576 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5578 if (allocator)
5580 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5581 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5582 gimple_seq_add_stmt (dlist, g);
5584 continue;
5586 else if (pass == 2)
5588 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5589 if (is_global_var (out))
5590 x = var;
5591 else if (is_omp_target (ctx->stmt))
5592 x = out;
5593 else
5595 bool by_ref = use_pointer_for_field (var, ctx);
5596 x = build_receiver_ref (var, by_ref, ctx);
5598 if (!omp_privatize_by_reference (var))
5599 x = build_fold_addr_expr (x);
5600 x = fold_convert (ptr_type_node, x);
5601 unsigned cnt = task_reduction_cnt - 1;
5602 if (!task_reduction_needs_orig_p)
5603 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5604 else
5605 cnt = task_reduction_cntorig - 1;
5606 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5607 size_int (cnt), NULL_TREE, NULL_TREE);
5608 gimplify_assign (r, x, ilist);
5609 continue;
5611 else if (pass == 3)
5613 tree type = TREE_TYPE (new_var);
5614 if (!omp_privatize_by_reference (var))
5615 type = build_pointer_type (type);
5616 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5618 unsigned cnt = task_reduction_cnt - 1;
5619 if (!task_reduction_needs_orig_p)
5620 cnt += (task_reduction_cntorig_full
5621 - task_reduction_cntorig);
5622 else
5623 cnt = task_reduction_cntorig - 1;
5624 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5625 size_int (cnt), NULL_TREE, NULL_TREE);
5627 else
5629 unsigned int idx = *ctx->task_reduction_map->get (c);
5630 tree off;
5631 if (ctx->task_reductions[1 + idx])
5632 off = fold_convert (sizetype,
5633 ctx->task_reductions[1 + idx]);
5634 else
5635 off = task_reduction_read (ilist, tskred_temp, sizetype,
5636 7 + 3 * idx + 1);
5637 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5638 tskred_base, off);
5640 x = fold_convert (type, x);
5641 tree t;
5642 if (omp_privatize_by_reference (var))
5644 gimplify_assign (new_var, x, ilist);
5645 t = new_var;
5646 new_var = build_simple_mem_ref (new_var);
5648 else
5650 t = create_tmp_var (type);
5651 gimplify_assign (t, x, ilist);
5652 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5653 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5655 t = fold_convert (build_pointer_type (boolean_type_node), t);
5656 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5657 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5658 cond = create_tmp_var (TREE_TYPE (t));
5659 gimplify_assign (cond, t, ilist);
5661 else if (is_variable_sized (var))
5663 /* For variable sized types, we need to allocate the
5664 actual storage here. Call alloca and store the
5665 result in the pointer decl that we created elsewhere. */
5666 if (pass == 0)
5667 continue;
5669 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5671 tree tmp;
5673 ptr = DECL_VALUE_EXPR (new_var);
5674 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5675 ptr = TREE_OPERAND (ptr, 0);
5676 gcc_assert (DECL_P (ptr));
5677 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5679 if (lower_private_allocate (var, new_var, allocator,
5680 allocate_ptr, ilist, ctx,
5681 false, x))
5682 tmp = allocate_ptr;
5683 else
5685 /* void *tmp = __builtin_alloca */
5686 tree atmp
5687 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5688 gcall *stmt
5689 = gimple_build_call (atmp, 2, x,
5690 size_int (DECL_ALIGN (var)));
5691 cfun->calls_alloca = 1;
5692 tmp = create_tmp_var_raw (ptr_type_node);
5693 gimple_add_tmp_var (tmp);
5694 gimple_call_set_lhs (stmt, tmp);
5696 gimple_seq_add_stmt (ilist, stmt);
5699 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5700 gimplify_assign (ptr, x, ilist);
5703 else if (omp_privatize_by_reference (var)
5704 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5705 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5707 /* For references that are being privatized for Fortran,
5708 allocate new backing storage for the new pointer
5709 variable. This allows us to avoid changing all the
5710 code that expects a pointer to something that expects
5711 a direct variable. */
5712 if (pass == 0)
5713 continue;
5715 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5716 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5718 x = build_receiver_ref (var, false, ctx);
5719 if (ctx->allocate_map)
5720 if (tree *allocatep = ctx->allocate_map->get (var))
5722 allocator = *allocatep;
5723 if (TREE_CODE (allocator) == TREE_LIST)
5724 allocator = TREE_PURPOSE (allocator);
5725 if (TREE_CODE (allocator) != INTEGER_CST)
5726 allocator = build_outer_var_ref (allocator, ctx);
5727 allocator = fold_convert (pointer_sized_int_node,
5728 allocator);
5729 allocate_ptr = unshare_expr (x);
5731 if (allocator == NULL_TREE)
5732 x = build_fold_addr_expr_loc (clause_loc, x);
5734 else if (lower_private_allocate (var, new_var, allocator,
5735 allocate_ptr,
5736 ilist, ctx, true, x))
5737 x = allocate_ptr;
5738 else if (TREE_CONSTANT (x))
5740 /* For reduction in SIMD loop, defer adding the
5741 initialization of the reference, because if we decide
5742 to use SIMD array for it, the initilization could cause
5743 expansion ICE. Ditto for other privatization clauses. */
5744 if (is_simd)
5745 x = NULL_TREE;
5746 else
5748 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5749 get_name (var));
5750 gimple_add_tmp_var (x);
5751 TREE_ADDRESSABLE (x) = 1;
5752 x = build_fold_addr_expr_loc (clause_loc, x);
5755 else
5757 tree atmp
5758 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5759 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5760 tree al = size_int (TYPE_ALIGN (rtype));
5761 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5764 if (x)
5766 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5767 gimplify_assign (new_var, x, ilist);
5770 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5772 else if ((c_kind == OMP_CLAUSE_REDUCTION
5773 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5774 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5776 if (pass == 0)
5777 continue;
5779 else if (pass != 0)
5780 continue;
5782 switch (OMP_CLAUSE_CODE (c))
5784 case OMP_CLAUSE_SHARED:
5785 /* Ignore shared directives in teams construct inside
5786 target construct. */
5787 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5788 && !is_host_teams_ctx (ctx))
5789 continue;
5790 /* Shared global vars are just accessed directly. */
5791 if (is_global_var (new_var))
5792 break;
5793 /* For taskloop firstprivate/lastprivate, represented
5794 as firstprivate and shared clause on the task, new_var
5795 is the firstprivate var. */
5796 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5797 break;
5798 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5799 needs to be delayed until after fixup_child_record_type so
5800 that we get the correct type during the dereference. */
5801 by_ref = use_pointer_for_field (var, ctx);
5802 x = build_receiver_ref (var, by_ref, ctx);
5803 SET_DECL_VALUE_EXPR (new_var, x);
5804 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5806 /* ??? If VAR is not passed by reference, and the variable
5807 hasn't been initialized yet, then we'll get a warning for
5808 the store into the omp_data_s structure. Ideally, we'd be
5809 able to notice this and not store anything at all, but
5810 we're generating code too early. Suppress the warning. */
5811 if (!by_ref)
5812 suppress_warning (var, OPT_Wuninitialized);
5813 break;
5815 case OMP_CLAUSE__CONDTEMP_:
5816 if (is_parallel_ctx (ctx))
5818 x = build_receiver_ref (var, false, ctx);
5819 SET_DECL_VALUE_EXPR (new_var, x);
5820 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5822 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5824 x = build_zero_cst (TREE_TYPE (var));
5825 goto do_private;
5827 break;
5829 case OMP_CLAUSE_LASTPRIVATE:
5830 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5831 break;
5832 /* FALLTHRU */
5834 case OMP_CLAUSE_PRIVATE:
5835 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5836 x = build_outer_var_ref (var, ctx);
5837 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5839 if (is_task_ctx (ctx))
5840 x = build_receiver_ref (var, false, ctx);
5841 else
5842 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5844 else
5845 x = NULL;
5846 do_private:
5847 tree nx;
5848 bool copy_ctor;
5849 copy_ctor = false;
5850 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5851 ilist, ctx, false, NULL_TREE);
5852 nx = unshare_expr (new_var);
5853 if (is_simd
5854 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5855 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5856 copy_ctor = true;
5857 if (copy_ctor)
5858 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5859 else
5860 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5861 if (is_simd)
5863 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5864 if ((TREE_ADDRESSABLE (new_var) || nx || y
5865 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5866 && (gimple_omp_for_collapse (ctx->stmt) != 1
5867 || (gimple_omp_for_index (ctx->stmt, 0)
5868 != new_var)))
5869 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5870 || omp_privatize_by_reference (var))
5871 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5872 ivar, lvar))
5874 if (omp_privatize_by_reference (var))
5876 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5877 tree new_vard = TREE_OPERAND (new_var, 0);
5878 gcc_assert (DECL_P (new_vard));
5879 SET_DECL_VALUE_EXPR (new_vard,
5880 build_fold_addr_expr (lvar));
5881 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5884 if (nx)
5886 tree iv = unshare_expr (ivar);
5887 if (copy_ctor)
5888 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5890 else
5891 x = lang_hooks.decls.omp_clause_default_ctor (c,
5895 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5897 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5898 unshare_expr (ivar), x);
5899 nx = x;
5901 if (nx && x)
5902 gimplify_and_add (x, &llist[0]);
5903 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5904 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5906 tree v = new_var;
5907 if (!DECL_P (v))
5909 gcc_assert (TREE_CODE (v) == MEM_REF);
5910 v = TREE_OPERAND (v, 0);
5911 gcc_assert (DECL_P (v));
5913 v = *ctx->lastprivate_conditional_map->get (v);
5914 tree t = create_tmp_var (TREE_TYPE (v));
5915 tree z = build_zero_cst (TREE_TYPE (v));
5916 tree orig_v
5917 = build_outer_var_ref (var, ctx,
5918 OMP_CLAUSE_LASTPRIVATE);
5919 gimple_seq_add_stmt (dlist,
5920 gimple_build_assign (t, z));
5921 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5922 tree civar = DECL_VALUE_EXPR (v);
5923 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5924 civar = unshare_expr (civar);
5925 TREE_OPERAND (civar, 1) = sctx.idx;
5926 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5927 unshare_expr (civar));
5928 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5929 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5930 orig_v, unshare_expr (ivar)));
5931 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5932 civar);
5933 x = build3 (COND_EXPR, void_type_node, cond, x,
5934 void_node);
5935 gimple_seq tseq = NULL;
5936 gimplify_and_add (x, &tseq);
5937 if (ctx->outer)
5938 lower_omp (&tseq, ctx->outer);
5939 gimple_seq_add_seq (&llist[1], tseq);
5941 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5942 && ctx->for_simd_scan_phase)
5944 x = unshare_expr (ivar);
5945 tree orig_v
5946 = build_outer_var_ref (var, ctx,
5947 OMP_CLAUSE_LASTPRIVATE);
5948 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5949 orig_v);
5950 gimplify_and_add (x, &llist[0]);
5952 if (y)
5954 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5955 if (y)
5956 gimplify_and_add (y, &llist[1]);
5958 break;
5960 if (omp_privatize_by_reference (var))
5962 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5963 tree new_vard = TREE_OPERAND (new_var, 0);
5964 gcc_assert (DECL_P (new_vard));
5965 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5966 x = TYPE_SIZE_UNIT (type);
5967 if (TREE_CONSTANT (x))
5969 x = create_tmp_var_raw (type, get_name (var));
5970 gimple_add_tmp_var (x);
5971 TREE_ADDRESSABLE (x) = 1;
5972 x = build_fold_addr_expr_loc (clause_loc, x);
5973 x = fold_convert_loc (clause_loc,
5974 TREE_TYPE (new_vard), x);
5975 gimplify_assign (new_vard, x, ilist);
5979 if (nx)
5980 gimplify_and_add (nx, ilist);
5981 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5982 && is_simd
5983 && ctx->for_simd_scan_phase)
5985 tree orig_v = build_outer_var_ref (var, ctx,
5986 OMP_CLAUSE_LASTPRIVATE);
5987 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5988 orig_v);
5989 gimplify_and_add (x, ilist);
5991 /* FALLTHRU */
5993 do_dtor:
5994 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5995 if (x)
5996 gimplify_and_add (x, dlist);
5997 if (allocator)
5999 if (!is_gimple_val (allocator))
6001 tree avar = create_tmp_var (TREE_TYPE (allocator));
6002 gimplify_assign (avar, allocator, dlist);
6003 allocator = avar;
6005 if (!is_gimple_val (allocate_ptr))
6007 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
6008 gimplify_assign (apvar, allocate_ptr, dlist);
6009 allocate_ptr = apvar;
6011 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
6012 gimple *g
6013 = gimple_build_call (f, 2, allocate_ptr, allocator);
6014 gimple_seq_add_stmt (dlist, g);
6016 break;
6018 case OMP_CLAUSE_LINEAR:
6019 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6020 goto do_firstprivate;
6021 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6022 x = NULL;
6023 else
6024 x = build_outer_var_ref (var, ctx);
6025 goto do_private;
6027 case OMP_CLAUSE_FIRSTPRIVATE:
6028 if (is_task_ctx (ctx))
6030 if ((omp_privatize_by_reference (var)
6031 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
6032 || is_variable_sized (var))
6033 goto do_dtor;
6034 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
6035 ctx))
6036 || use_pointer_for_field (var, NULL))
6038 x = build_receiver_ref (var, false, ctx);
6039 if (ctx->allocate_map)
6040 if (tree *allocatep = ctx->allocate_map->get (var))
6042 allocator = *allocatep;
6043 if (TREE_CODE (allocator) == TREE_LIST)
6044 allocator = TREE_PURPOSE (allocator);
6045 if (TREE_CODE (allocator) != INTEGER_CST)
6046 allocator = build_outer_var_ref (allocator, ctx);
6047 allocator = fold_convert (pointer_sized_int_node,
6048 allocator);
6049 allocate_ptr = unshare_expr (x);
6050 x = build_simple_mem_ref (x);
6051 TREE_THIS_NOTRAP (x) = 1;
6053 SET_DECL_VALUE_EXPR (new_var, x);
6054 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6055 goto do_dtor;
6058 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6059 && omp_privatize_by_reference (var))
6061 x = build_outer_var_ref (var, ctx);
6062 gcc_assert (TREE_CODE (x) == MEM_REF
6063 && integer_zerop (TREE_OPERAND (x, 1)));
6064 x = TREE_OPERAND (x, 0);
6065 x = lang_hooks.decls.omp_clause_copy_ctor
6066 (c, unshare_expr (new_var), x);
6067 gimplify_and_add (x, ilist);
6068 goto do_dtor;
6070 do_firstprivate:
6071 lower_private_allocate (var, new_var, allocator, allocate_ptr,
6072 ilist, ctx, false, NULL_TREE);
6073 x = build_outer_var_ref (var, ctx);
6074 if (is_simd)
6076 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6077 && gimple_omp_for_combined_into_p (ctx->stmt))
6079 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6080 tree stept = TREE_TYPE (t);
6081 tree ct = omp_find_clause (clauses,
6082 OMP_CLAUSE__LOOPTEMP_);
6083 gcc_assert (ct);
6084 tree l = OMP_CLAUSE_DECL (ct);
6085 tree n1 = fd->loop.n1;
6086 tree step = fd->loop.step;
6087 tree itype = TREE_TYPE (l);
6088 if (POINTER_TYPE_P (itype))
6089 itype = signed_type_for (itype);
6090 l = fold_build2 (MINUS_EXPR, itype, l, n1);
6091 if (TYPE_UNSIGNED (itype)
6092 && fd->loop.cond_code == GT_EXPR)
6093 l = fold_build2 (TRUNC_DIV_EXPR, itype,
6094 fold_build1 (NEGATE_EXPR, itype, l),
6095 fold_build1 (NEGATE_EXPR,
6096 itype, step));
6097 else
6098 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6099 t = fold_build2 (MULT_EXPR, stept,
6100 fold_convert (stept, l), t);
6102 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6104 if (omp_privatize_by_reference (var))
6106 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6107 tree new_vard = TREE_OPERAND (new_var, 0);
6108 gcc_assert (DECL_P (new_vard));
6109 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6110 nx = TYPE_SIZE_UNIT (type);
6111 if (TREE_CONSTANT (nx))
6113 nx = create_tmp_var_raw (type,
6114 get_name (var));
6115 gimple_add_tmp_var (nx);
6116 TREE_ADDRESSABLE (nx) = 1;
6117 nx = build_fold_addr_expr_loc (clause_loc,
6118 nx);
6119 nx = fold_convert_loc (clause_loc,
6120 TREE_TYPE (new_vard),
6121 nx);
6122 gimplify_assign (new_vard, nx, ilist);
6126 x = lang_hooks.decls.omp_clause_linear_ctor
6127 (c, new_var, x, t);
6128 gimplify_and_add (x, ilist);
6129 goto do_dtor;
6132 if (POINTER_TYPE_P (TREE_TYPE (x)))
6133 x = fold_build2 (POINTER_PLUS_EXPR,
6134 TREE_TYPE (x), x, t);
6135 else
6136 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
6139 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6140 || TREE_ADDRESSABLE (new_var)
6141 || omp_privatize_by_reference (var))
6142 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6143 ivar, lvar))
6145 if (omp_privatize_by_reference (var))
6147 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6148 tree new_vard = TREE_OPERAND (new_var, 0);
6149 gcc_assert (DECL_P (new_vard));
6150 SET_DECL_VALUE_EXPR (new_vard,
6151 build_fold_addr_expr (lvar));
6152 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6154 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6156 tree iv = create_tmp_var (TREE_TYPE (new_var));
6157 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6158 gimplify_and_add (x, ilist);
6159 gimple_stmt_iterator gsi
6160 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6161 gassign *g
6162 = gimple_build_assign (unshare_expr (lvar), iv);
6163 gsi_insert_before_without_update (&gsi, g,
6164 GSI_SAME_STMT);
6165 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6166 enum tree_code code = PLUS_EXPR;
6167 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6168 code = POINTER_PLUS_EXPR;
6169 g = gimple_build_assign (iv, code, iv, t);
6170 gsi_insert_before_without_update (&gsi, g,
6171 GSI_SAME_STMT);
6172 break;
6174 x = lang_hooks.decls.omp_clause_copy_ctor
6175 (c, unshare_expr (ivar), x);
6176 gimplify_and_add (x, &llist[0]);
6177 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6178 if (x)
6179 gimplify_and_add (x, &llist[1]);
6180 break;
6182 if (omp_privatize_by_reference (var))
6184 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6185 tree new_vard = TREE_OPERAND (new_var, 0);
6186 gcc_assert (DECL_P (new_vard));
6187 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6188 nx = TYPE_SIZE_UNIT (type);
6189 if (TREE_CONSTANT (nx))
6191 nx = create_tmp_var_raw (type, get_name (var));
6192 gimple_add_tmp_var (nx);
6193 TREE_ADDRESSABLE (nx) = 1;
6194 nx = build_fold_addr_expr_loc (clause_loc, nx);
6195 nx = fold_convert_loc (clause_loc,
6196 TREE_TYPE (new_vard), nx);
6197 gimplify_assign (new_vard, nx, ilist);
6201 x = lang_hooks.decls.omp_clause_copy_ctor
6202 (c, unshare_expr (new_var), x);
6203 gimplify_and_add (x, ilist);
6204 goto do_dtor;
6206 case OMP_CLAUSE__LOOPTEMP_:
6207 case OMP_CLAUSE__REDUCTEMP_:
6208 gcc_assert (is_taskreg_ctx (ctx));
6209 x = build_outer_var_ref (var, ctx);
6210 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6211 gimplify_and_add (x, ilist);
6212 break;
6214 case OMP_CLAUSE_COPYIN:
6215 by_ref = use_pointer_for_field (var, NULL);
6216 x = build_receiver_ref (var, by_ref, ctx);
6217 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6218 append_to_statement_list (x, &copyin_seq);
6219 copyin_by_ref |= by_ref;
6220 break;
6222 case OMP_CLAUSE_REDUCTION:
6223 case OMP_CLAUSE_IN_REDUCTION:
6224 /* OpenACC reductions are initialized using the
6225 GOACC_REDUCTION internal function. */
6226 if (is_gimple_omp_oacc (ctx->stmt))
6227 break;
6228 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6230 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6231 gimple *tseq;
6232 tree ptype = TREE_TYPE (placeholder);
6233 if (cond)
6235 x = error_mark_node;
6236 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6237 && !task_reduction_needs_orig_p)
6238 x = var;
6239 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6241 tree pptype = build_pointer_type (ptype);
6242 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6243 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6244 size_int (task_reduction_cnt_full
6245 + task_reduction_cntorig - 1),
6246 NULL_TREE, NULL_TREE);
6247 else
6249 unsigned int idx
6250 = *ctx->task_reduction_map->get (c);
6251 x = task_reduction_read (ilist, tskred_temp,
6252 pptype, 7 + 3 * idx);
6254 x = fold_convert (pptype, x);
6255 x = build_simple_mem_ref (x);
6258 else
6260 lower_private_allocate (var, new_var, allocator,
6261 allocate_ptr, ilist, ctx, false,
6262 NULL_TREE);
6263 x = build_outer_var_ref (var, ctx);
6265 if (omp_privatize_by_reference (var)
6266 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6267 x = build_fold_addr_expr_loc (clause_loc, x);
6269 SET_DECL_VALUE_EXPR (placeholder, x);
6270 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6271 tree new_vard = new_var;
6272 if (omp_privatize_by_reference (var))
6274 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6275 new_vard = TREE_OPERAND (new_var, 0);
6276 gcc_assert (DECL_P (new_vard));
6278 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6279 if (is_simd
6280 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6281 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6282 rvarp = &rvar;
6283 if (is_simd
6284 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6285 ivar, lvar, rvarp,
6286 &rvar2))
6288 if (new_vard == new_var)
6290 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6291 SET_DECL_VALUE_EXPR (new_var, ivar);
6293 else
6295 SET_DECL_VALUE_EXPR (new_vard,
6296 build_fold_addr_expr (ivar));
6297 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6299 x = lang_hooks.decls.omp_clause_default_ctor
6300 (c, unshare_expr (ivar),
6301 build_outer_var_ref (var, ctx));
6302 if (rvarp && ctx->for_simd_scan_phase)
6304 if (x)
6305 gimplify_and_add (x, &llist[0]);
6306 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6307 if (x)
6308 gimplify_and_add (x, &llist[1]);
6309 break;
6311 else if (rvarp)
6313 if (x)
6315 gimplify_and_add (x, &llist[0]);
6317 tree ivar2 = unshare_expr (lvar);
6318 TREE_OPERAND (ivar2, 1) = sctx.idx;
6319 x = lang_hooks.decls.omp_clause_default_ctor
6320 (c, ivar2, build_outer_var_ref (var, ctx));
6321 gimplify_and_add (x, &llist[0]);
6323 if (rvar2)
6325 x = lang_hooks.decls.omp_clause_default_ctor
6326 (c, unshare_expr (rvar2),
6327 build_outer_var_ref (var, ctx));
6328 gimplify_and_add (x, &llist[0]);
6331 /* For types that need construction, add another
6332 private var which will be default constructed
6333 and optionally initialized with
6334 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6335 loop we want to assign this value instead of
6336 constructing and destructing it in each
6337 iteration. */
6338 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6339 gimple_add_tmp_var (nv);
6340 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6341 ? rvar2
6342 : ivar, 0),
6343 nv);
6344 x = lang_hooks.decls.omp_clause_default_ctor
6345 (c, nv, build_outer_var_ref (var, ctx));
6346 gimplify_and_add (x, ilist);
6348 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6350 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6351 x = DECL_VALUE_EXPR (new_vard);
6352 tree vexpr = nv;
6353 if (new_vard != new_var)
6354 vexpr = build_fold_addr_expr (nv);
6355 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6356 lower_omp (&tseq, ctx);
6357 SET_DECL_VALUE_EXPR (new_vard, x);
6358 gimple_seq_add_seq (ilist, tseq);
6359 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6362 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6363 if (x)
6364 gimplify_and_add (x, dlist);
6367 tree ref = build_outer_var_ref (var, ctx);
6368 x = unshare_expr (ivar);
6369 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6370 ref);
6371 gimplify_and_add (x, &llist[0]);
6373 ref = build_outer_var_ref (var, ctx);
6374 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6375 rvar);
6376 gimplify_and_add (x, &llist[3]);
6378 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6379 if (new_vard == new_var)
6380 SET_DECL_VALUE_EXPR (new_var, lvar);
6381 else
6382 SET_DECL_VALUE_EXPR (new_vard,
6383 build_fold_addr_expr (lvar));
6385 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6386 if (x)
6387 gimplify_and_add (x, &llist[1]);
6389 tree ivar2 = unshare_expr (lvar);
6390 TREE_OPERAND (ivar2, 1) = sctx.idx;
6391 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6392 if (x)
6393 gimplify_and_add (x, &llist[1]);
6395 if (rvar2)
6397 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6398 if (x)
6399 gimplify_and_add (x, &llist[1]);
6401 break;
6403 if (x)
6404 gimplify_and_add (x, &llist[0]);
6405 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6407 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6408 lower_omp (&tseq, ctx);
6409 gimple_seq_add_seq (&llist[0], tseq);
6411 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6412 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6413 lower_omp (&tseq, ctx);
6414 gimple_seq_add_seq (&llist[1], tseq);
6415 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6416 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6417 if (new_vard == new_var)
6418 SET_DECL_VALUE_EXPR (new_var, lvar);
6419 else
6420 SET_DECL_VALUE_EXPR (new_vard,
6421 build_fold_addr_expr (lvar));
6422 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6423 if (x)
6424 gimplify_and_add (x, &llist[1]);
6425 break;
6427 /* If this is a reference to constant size reduction var
6428 with placeholder, we haven't emitted the initializer
6429 for it because it is undesirable if SIMD arrays are used.
6430 But if they aren't used, we need to emit the deferred
6431 initialization now. */
6432 else if (omp_privatize_by_reference (var) && is_simd)
6433 handle_simd_reference (clause_loc, new_vard, ilist);
6435 tree lab2 = NULL_TREE;
6436 if (cond)
6438 gimple *g;
6439 if (!is_parallel_ctx (ctx))
6441 tree condv = create_tmp_var (boolean_type_node);
6442 tree m = build_simple_mem_ref (cond);
6443 g = gimple_build_assign (condv, m);
6444 gimple_seq_add_stmt (ilist, g);
6445 tree lab1
6446 = create_artificial_label (UNKNOWN_LOCATION);
6447 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6448 g = gimple_build_cond (NE_EXPR, condv,
6449 boolean_false_node,
6450 lab2, lab1);
6451 gimple_seq_add_stmt (ilist, g);
6452 gimple_seq_add_stmt (ilist,
6453 gimple_build_label (lab1));
6455 g = gimple_build_assign (build_simple_mem_ref (cond),
6456 boolean_true_node);
6457 gimple_seq_add_stmt (ilist, g);
6459 x = lang_hooks.decls.omp_clause_default_ctor
6460 (c, unshare_expr (new_var),
6461 cond ? NULL_TREE
6462 : build_outer_var_ref (var, ctx));
6463 if (x)
6464 gimplify_and_add (x, ilist);
6466 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6467 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6469 if (ctx->for_simd_scan_phase)
6470 goto do_dtor;
6471 if (x || (!is_simd
6472 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6474 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6475 gimple_add_tmp_var (nv);
6476 ctx->cb.decl_map->put (new_vard, nv);
6477 x = lang_hooks.decls.omp_clause_default_ctor
6478 (c, nv, build_outer_var_ref (var, ctx));
6479 if (x)
6480 gimplify_and_add (x, ilist);
6481 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6483 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6484 tree vexpr = nv;
6485 if (new_vard != new_var)
6486 vexpr = build_fold_addr_expr (nv);
6487 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6488 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6489 lower_omp (&tseq, ctx);
6490 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6491 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6492 gimple_seq_add_seq (ilist, tseq);
6494 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6495 if (is_simd && ctx->scan_exclusive)
6497 tree nv2
6498 = create_tmp_var_raw (TREE_TYPE (new_var));
6499 gimple_add_tmp_var (nv2);
6500 ctx->cb.decl_map->put (nv, nv2);
6501 x = lang_hooks.decls.omp_clause_default_ctor
6502 (c, nv2, build_outer_var_ref (var, ctx));
6503 gimplify_and_add (x, ilist);
6504 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6505 if (x)
6506 gimplify_and_add (x, dlist);
6508 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6509 if (x)
6510 gimplify_and_add (x, dlist);
6512 else if (is_simd
6513 && ctx->scan_exclusive
6514 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6516 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6517 gimple_add_tmp_var (nv2);
6518 ctx->cb.decl_map->put (new_vard, nv2);
6519 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6520 if (x)
6521 gimplify_and_add (x, dlist);
6523 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6524 goto do_dtor;
6527 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6529 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6530 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6531 && is_omp_target (ctx->stmt))
6533 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6534 tree oldv = NULL_TREE;
6535 gcc_assert (d);
6536 if (DECL_HAS_VALUE_EXPR_P (d))
6537 oldv = DECL_VALUE_EXPR (d);
6538 SET_DECL_VALUE_EXPR (d, new_vard);
6539 DECL_HAS_VALUE_EXPR_P (d) = 1;
6540 lower_omp (&tseq, ctx);
6541 if (oldv)
6542 SET_DECL_VALUE_EXPR (d, oldv);
6543 else
6545 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6546 DECL_HAS_VALUE_EXPR_P (d) = 0;
6549 else
6550 lower_omp (&tseq, ctx);
6551 gimple_seq_add_seq (ilist, tseq);
6553 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6554 if (is_simd)
6556 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6557 lower_omp (&tseq, ctx);
6558 gimple_seq_add_seq (dlist, tseq);
6559 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6561 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6562 if (cond)
6564 if (lab2)
6565 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6566 break;
6568 goto do_dtor;
6570 else
6572 x = omp_reduction_init (c, TREE_TYPE (new_var));
6573 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6574 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6576 if (cond)
6578 gimple *g;
6579 tree lab2 = NULL_TREE;
6580 /* GOMP_taskgroup_reduction_register memsets the whole
6581 array to zero. If the initializer is zero, we don't
6582 need to initialize it again, just mark it as ever
6583 used unconditionally, i.e. cond = true. */
6584 if (initializer_zerop (x))
6586 g = gimple_build_assign (build_simple_mem_ref (cond),
6587 boolean_true_node);
6588 gimple_seq_add_stmt (ilist, g);
6589 break;
6592 /* Otherwise, emit
6593 if (!cond) { cond = true; new_var = x; } */
6594 if (!is_parallel_ctx (ctx))
6596 tree condv = create_tmp_var (boolean_type_node);
6597 tree m = build_simple_mem_ref (cond);
6598 g = gimple_build_assign (condv, m);
6599 gimple_seq_add_stmt (ilist, g);
6600 tree lab1
6601 = create_artificial_label (UNKNOWN_LOCATION);
6602 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6603 g = gimple_build_cond (NE_EXPR, condv,
6604 boolean_false_node,
6605 lab2, lab1);
6606 gimple_seq_add_stmt (ilist, g);
6607 gimple_seq_add_stmt (ilist,
6608 gimple_build_label (lab1));
6610 g = gimple_build_assign (build_simple_mem_ref (cond),
6611 boolean_true_node);
6612 gimple_seq_add_stmt (ilist, g);
6613 gimplify_assign (new_var, x, ilist);
6614 if (lab2)
6615 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6616 break;
6619 /* reduction(-:var) sums up the partial results, so it
6620 acts identically to reduction(+:var). */
6621 if (code == MINUS_EXPR)
6622 code = PLUS_EXPR;
6624 bool is_truth_op
6625 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6626 tree new_vard = new_var;
6627 if (is_simd && omp_privatize_by_reference (var))
6629 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6630 new_vard = TREE_OPERAND (new_var, 0);
6631 gcc_assert (DECL_P (new_vard));
6633 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6634 if (is_simd
6635 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6636 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6637 rvarp = &rvar;
6638 if (is_simd
6639 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6640 ivar, lvar, rvarp,
6641 &rvar2))
6643 if (new_vard != new_var)
6645 SET_DECL_VALUE_EXPR (new_vard,
6646 build_fold_addr_expr (lvar));
6647 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6650 tree ref = build_outer_var_ref (var, ctx);
6652 if (rvarp)
6654 if (ctx->for_simd_scan_phase)
6655 break;
6656 gimplify_assign (ivar, ref, &llist[0]);
6657 ref = build_outer_var_ref (var, ctx);
6658 gimplify_assign (ref, rvar, &llist[3]);
6659 break;
6662 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6664 if (sctx.is_simt)
6666 if (!simt_lane)
6667 simt_lane = create_tmp_var (unsigned_type_node);
6668 x = build_call_expr_internal_loc
6669 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6670 TREE_TYPE (ivar), 2, ivar, simt_lane);
6671 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6672 gimplify_assign (ivar, x, &llist[2]);
6674 tree ivar2 = ivar;
6675 tree ref2 = ref;
6676 if (is_truth_op)
6678 tree zero = build_zero_cst (TREE_TYPE (ivar));
6679 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6680 boolean_type_node, ivar,
6681 zero);
6682 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6683 boolean_type_node, ref,
6684 zero);
6686 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6687 if (is_truth_op)
6688 x = fold_convert (TREE_TYPE (ref), x);
6689 ref = build_outer_var_ref (var, ctx);
6690 gimplify_assign (ref, x, &llist[1]);
6693 else
6695 lower_private_allocate (var, new_var, allocator,
6696 allocate_ptr, ilist, ctx,
6697 false, NULL_TREE);
6698 if (omp_privatize_by_reference (var) && is_simd)
6699 handle_simd_reference (clause_loc, new_vard, ilist);
6700 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6701 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6702 break;
6703 gimplify_assign (new_var, x, ilist);
6704 if (is_simd)
6706 tree ref = build_outer_var_ref (var, ctx);
6707 tree new_var2 = new_var;
6708 tree ref2 = ref;
6709 if (is_truth_op)
6711 tree zero = build_zero_cst (TREE_TYPE (new_var));
6712 new_var2
6713 = fold_build2_loc (clause_loc, NE_EXPR,
6714 boolean_type_node, new_var,
6715 zero);
6716 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6717 boolean_type_node, ref,
6718 zero);
6720 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6721 if (is_truth_op)
6722 x = fold_convert (TREE_TYPE (new_var), x);
6723 ref = build_outer_var_ref (var, ctx);
6724 gimplify_assign (ref, x, dlist);
6726 if (allocator)
6727 goto do_dtor;
6730 break;
6732 default:
6733 gcc_unreachable ();
6737 if (tskred_avar)
6739 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6740 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6743 if (known_eq (sctx.max_vf, 1U))
6745 sctx.is_simt = false;
6746 if (ctx->lastprivate_conditional_map)
6748 if (gimple_omp_for_combined_into_p (ctx->stmt))
6750 /* Signal to lower_omp_1 that it should use parent context. */
6751 ctx->combined_into_simd_safelen1 = true;
6752 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6753 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6754 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6756 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6757 omp_context *outer = ctx->outer;
6758 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6759 outer = outer->outer;
6760 tree *v = ctx->lastprivate_conditional_map->get (o);
6761 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6762 tree *pv = outer->lastprivate_conditional_map->get (po);
6763 *v = *pv;
6766 else
6768 /* When not vectorized, treat lastprivate(conditional:) like
6769 normal lastprivate, as there will be just one simd lane
6770 writing the privatized variable. */
6771 delete ctx->lastprivate_conditional_map;
6772 ctx->lastprivate_conditional_map = NULL;
6777 if (nonconst_simd_if)
6779 if (sctx.lane == NULL_TREE)
6781 sctx.idx = create_tmp_var (unsigned_type_node);
6782 sctx.lane = create_tmp_var (unsigned_type_node);
6784 /* FIXME: For now. */
6785 sctx.is_simt = false;
6788 if (sctx.lane || sctx.is_simt)
6790 uid = create_tmp_var (ptr_type_node, "simduid");
6791 /* Don't want uninit warnings on simduid, it is always uninitialized,
6792 but we use it not for the value, but for the DECL_UID only. */
6793 suppress_warning (uid, OPT_Wuninitialized);
6794 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6795 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6796 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6797 gimple_omp_for_set_clauses (ctx->stmt, c);
6799 /* Emit calls denoting privatized variables and initializing a pointer to
6800 structure that holds private variables as fields after ompdevlow pass. */
6801 if (sctx.is_simt)
6803 sctx.simt_eargs[0] = uid;
6804 gimple *g
6805 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6806 gimple_call_set_lhs (g, uid);
6807 gimple_seq_add_stmt (ilist, g);
6808 sctx.simt_eargs.release ();
6810 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6811 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6812 gimple_call_set_lhs (g, simtrec);
6813 gimple_seq_add_stmt (ilist, g);
6815 if (sctx.lane)
6817 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6818 2 + (nonconst_simd_if != NULL),
6819 uid, integer_zero_node,
6820 nonconst_simd_if);
6821 gimple_call_set_lhs (g, sctx.lane);
6822 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6823 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6824 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6825 build_int_cst (unsigned_type_node, 0));
6826 gimple_seq_add_stmt (ilist, g);
6827 if (sctx.lastlane)
6829 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6830 2, uid, sctx.lane);
6831 gimple_call_set_lhs (g, sctx.lastlane);
6832 gimple_seq_add_stmt (dlist, g);
6833 gimple_seq_add_seq (dlist, llist[3]);
6835 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6836 if (llist[2])
6838 tree simt_vf = create_tmp_var (unsigned_type_node);
6839 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6840 gimple_call_set_lhs (g, simt_vf);
6841 gimple_seq_add_stmt (dlist, g);
6843 tree t = build_int_cst (unsigned_type_node, 1);
6844 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6845 gimple_seq_add_stmt (dlist, g);
6847 t = build_int_cst (unsigned_type_node, 0);
6848 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6849 gimple_seq_add_stmt (dlist, g);
6851 tree body = create_artificial_label (UNKNOWN_LOCATION);
6852 tree header = create_artificial_label (UNKNOWN_LOCATION);
6853 tree end = create_artificial_label (UNKNOWN_LOCATION);
6854 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6855 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6857 gimple_seq_add_seq (dlist, llist[2]);
6859 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6860 gimple_seq_add_stmt (dlist, g);
6862 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6863 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6864 gimple_seq_add_stmt (dlist, g);
6866 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6868 for (int i = 0; i < 2; i++)
6869 if (llist[i])
6871 tree vf = create_tmp_var (unsigned_type_node);
6872 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6873 gimple_call_set_lhs (g, vf);
6874 gimple_seq *seq = i == 0 ? ilist : dlist;
6875 gimple_seq_add_stmt (seq, g);
6876 tree t = build_int_cst (unsigned_type_node, 0);
6877 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6878 gimple_seq_add_stmt (seq, g);
6879 tree body = create_artificial_label (UNKNOWN_LOCATION);
6880 tree header = create_artificial_label (UNKNOWN_LOCATION);
6881 tree end = create_artificial_label (UNKNOWN_LOCATION);
6882 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6883 gimple_seq_add_stmt (seq, gimple_build_label (body));
6884 gimple_seq_add_seq (seq, llist[i]);
6885 t = build_int_cst (unsigned_type_node, 1);
6886 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6887 gimple_seq_add_stmt (seq, g);
6888 gimple_seq_add_stmt (seq, gimple_build_label (header));
6889 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6890 gimple_seq_add_stmt (seq, g);
6891 gimple_seq_add_stmt (seq, gimple_build_label (end));
6894 if (sctx.is_simt)
6896 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6897 gimple *g
6898 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6899 gimple_seq_add_stmt (dlist, g);
6902 /* The copyin sequence is not to be executed by the main thread, since
6903 that would result in self-copies. Perhaps not visible to scalars,
6904 but it certainly is to C++ operator=. */
6905 if (copyin_seq)
6907 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6909 x = build2 (NE_EXPR, boolean_type_node, x,
6910 build_int_cst (TREE_TYPE (x), 0));
6911 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6912 gimplify_and_add (x, ilist);
6915 /* If any copyin variable is passed by reference, we must ensure the
6916 master thread doesn't modify it before it is copied over in all
6917 threads. Similarly for variables in both firstprivate and
6918 lastprivate clauses we need to ensure the lastprivate copying
6919 happens after firstprivate copying in all threads. And similarly
6920 for UDRs if initializer expression refers to omp_orig. */
6921 if (copyin_by_ref || lastprivate_firstprivate
6922 || (reduction_omp_orig_ref
6923 && !ctx->scan_inclusive
6924 && !ctx->scan_exclusive))
6926 /* Don't add any barrier for #pragma omp simd or
6927 #pragma omp distribute. */
6928 if (!is_task_ctx (ctx)
6929 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6930 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6931 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6934 /* If max_vf is non-zero, then we can use only a vectorization factor
6935 up to the max_vf we chose. So stick it into the safelen clause. */
6936 if (maybe_ne (sctx.max_vf, 0U))
6938 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6939 OMP_CLAUSE_SAFELEN);
6940 poly_uint64 safe_len;
6941 if (c == NULL_TREE
6942 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6943 && maybe_gt (safe_len, sctx.max_vf)))
6945 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6946 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6947 sctx.max_vf);
6948 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6949 gimple_omp_for_set_clauses (ctx->stmt, c);
6954 /* Create temporary variables for lastprivate(conditional:) implementation
6955 in context CTX with CLAUSES. */
6957 static void
6958 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6960 tree iter_type = NULL_TREE;
6961 tree cond_ptr = NULL_TREE;
6962 tree iter_var = NULL_TREE;
6963 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6964 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6965 tree next = *clauses;
6966 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6967 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6968 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6970 if (is_simd)
6972 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6973 gcc_assert (cc);
6974 if (iter_type == NULL_TREE)
6976 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6977 iter_var = create_tmp_var_raw (iter_type);
6978 DECL_CONTEXT (iter_var) = current_function_decl;
6979 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6980 DECL_CHAIN (iter_var) = ctx->block_vars;
6981 ctx->block_vars = iter_var;
6982 tree c3
6983 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6984 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6985 OMP_CLAUSE_DECL (c3) = iter_var;
6986 OMP_CLAUSE_CHAIN (c3) = *clauses;
6987 *clauses = c3;
6988 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6990 next = OMP_CLAUSE_CHAIN (cc);
6991 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6992 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6993 ctx->lastprivate_conditional_map->put (o, v);
6994 continue;
6996 if (iter_type == NULL)
6998 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
7000 struct omp_for_data fd;
7001 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
7002 NULL);
7003 iter_type = unsigned_type_for (fd.iter_type);
7005 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
7006 iter_type = unsigned_type_node;
7007 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
7008 if (c2)
7010 cond_ptr
7011 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
7012 OMP_CLAUSE_DECL (c2) = cond_ptr;
7014 else
7016 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
7017 DECL_CONTEXT (cond_ptr) = current_function_decl;
7018 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
7019 DECL_CHAIN (cond_ptr) = ctx->block_vars;
7020 ctx->block_vars = cond_ptr;
7021 c2 = build_omp_clause (UNKNOWN_LOCATION,
7022 OMP_CLAUSE__CONDTEMP_);
7023 OMP_CLAUSE_DECL (c2) = cond_ptr;
7024 OMP_CLAUSE_CHAIN (c2) = *clauses;
7025 *clauses = c2;
7027 iter_var = create_tmp_var_raw (iter_type);
7028 DECL_CONTEXT (iter_var) = current_function_decl;
7029 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7030 DECL_CHAIN (iter_var) = ctx->block_vars;
7031 ctx->block_vars = iter_var;
7032 tree c3
7033 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7034 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7035 OMP_CLAUSE_DECL (c3) = iter_var;
7036 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
7037 OMP_CLAUSE_CHAIN (c2) = c3;
7038 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7040 tree v = create_tmp_var_raw (iter_type);
7041 DECL_CONTEXT (v) = current_function_decl;
7042 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
7043 DECL_CHAIN (v) = ctx->block_vars;
7044 ctx->block_vars = v;
7045 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7046 ctx->lastprivate_conditional_map->put (o, v);
7051 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7052 both parallel and workshare constructs. PREDICATE may be NULL if it's
7053 always true. BODY_P is the sequence to insert early initialization
7054 if needed, STMT_LIST is where the non-conditional lastprivate handling
7055 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
7056 section. */
7058 static void
7059 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
7060 gimple_seq *stmt_list, gimple_seq *cstmt_list,
7061 omp_context *ctx)
7063 tree x, c, label = NULL, orig_clauses = clauses;
7064 bool par_clauses = false;
7065 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
7066 unsigned HOST_WIDE_INT conditional_off = 0;
7067 gimple_seq post_stmt_list = NULL;
7069 /* Early exit if there are no lastprivate or linear clauses. */
7070 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
7071 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
7072 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
7073 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
7074 break;
7075 if (clauses == NULL)
7077 /* If this was a workshare clause, see if it had been combined
7078 with its parallel. In that case, look for the clauses on the
7079 parallel statement itself. */
7080 if (is_parallel_ctx (ctx))
7081 return;
7083 ctx = ctx->outer;
7084 if (ctx == NULL || !is_parallel_ctx (ctx))
7085 return;
7087 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7088 OMP_CLAUSE_LASTPRIVATE);
7089 if (clauses == NULL)
7090 return;
7091 par_clauses = true;
7094 bool maybe_simt = false;
7095 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7096 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7098 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
7099 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
7100 if (simduid)
7101 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
7104 if (predicate)
7106 gcond *stmt;
7107 tree label_true, arm1, arm2;
7108 enum tree_code pred_code = TREE_CODE (predicate);
7110 label = create_artificial_label (UNKNOWN_LOCATION);
7111 label_true = create_artificial_label (UNKNOWN_LOCATION);
7112 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
7114 arm1 = TREE_OPERAND (predicate, 0);
7115 arm2 = TREE_OPERAND (predicate, 1);
7116 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7117 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
7119 else
7121 arm1 = predicate;
7122 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7123 arm2 = boolean_false_node;
7124 pred_code = NE_EXPR;
7126 if (maybe_simt)
7128 c = build2 (pred_code, boolean_type_node, arm1, arm2);
7129 c = fold_convert (integer_type_node, c);
7130 simtcond = create_tmp_var (integer_type_node);
7131 gimplify_assign (simtcond, c, stmt_list);
7132 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
7133 1, simtcond);
7134 c = create_tmp_var (integer_type_node);
7135 gimple_call_set_lhs (g, c);
7136 gimple_seq_add_stmt (stmt_list, g);
7137 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
7138 label_true, label);
7140 else
7141 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
7142 gimple_seq_add_stmt (stmt_list, stmt);
7143 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
7146 tree cond_ptr = NULL_TREE;
7147 for (c = clauses; c ;)
7149 tree var, new_var;
7150 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7151 gimple_seq *this_stmt_list = stmt_list;
7152 tree lab2 = NULL_TREE;
7154 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7155 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7156 && ctx->lastprivate_conditional_map
7157 && !ctx->combined_into_simd_safelen1)
7159 gcc_assert (body_p);
7160 if (simduid)
7161 goto next;
7162 if (cond_ptr == NULL_TREE)
7164 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
7165 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
7167 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
7168 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7169 tree v = *ctx->lastprivate_conditional_map->get (o);
7170 gimplify_assign (v, build_zero_cst (type), body_p);
7171 this_stmt_list = cstmt_list;
7172 tree mem;
7173 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
7175 mem = build2 (MEM_REF, type, cond_ptr,
7176 build_int_cst (TREE_TYPE (cond_ptr),
7177 conditional_off));
7178 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
7180 else
7181 mem = build4 (ARRAY_REF, type, cond_ptr,
7182 size_int (conditional_off++), NULL_TREE, NULL_TREE);
7183 tree mem2 = copy_node (mem);
7184 gimple_seq seq = NULL;
7185 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
7186 gimple_seq_add_seq (this_stmt_list, seq);
7187 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7188 lab2 = create_artificial_label (UNKNOWN_LOCATION);
7189 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
7190 gimple_seq_add_stmt (this_stmt_list, g);
7191 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
7192 gimplify_assign (mem2, v, this_stmt_list);
7194 else if (predicate
7195 && ctx->combined_into_simd_safelen1
7196 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7197 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7198 && ctx->lastprivate_conditional_map)
7199 this_stmt_list = &post_stmt_list;
7201 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7202 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7203 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7205 var = OMP_CLAUSE_DECL (c);
7206 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7207 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7208 && is_taskloop_ctx (ctx))
7210 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
7211 new_var = lookup_decl (var, ctx->outer);
7213 else
7215 new_var = lookup_decl (var, ctx);
7216 /* Avoid uninitialized warnings for lastprivate and
7217 for linear iterators. */
7218 if (predicate
7219 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7220 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
7221 suppress_warning (new_var, OPT_Wuninitialized);
7224 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
7226 tree val = DECL_VALUE_EXPR (new_var);
7227 if (TREE_CODE (val) == ARRAY_REF
7228 && VAR_P (TREE_OPERAND (val, 0))
7229 && lookup_attribute ("omp simd array",
7230 DECL_ATTRIBUTES (TREE_OPERAND (val,
7231 0))))
7233 if (lastlane == NULL)
7235 lastlane = create_tmp_var (unsigned_type_node);
7236 gcall *g
7237 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
7238 2, simduid,
7239 TREE_OPERAND (val, 1));
7240 gimple_call_set_lhs (g, lastlane);
7241 gimple_seq_add_stmt (this_stmt_list, g);
7243 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
7244 TREE_OPERAND (val, 0), lastlane,
7245 NULL_TREE, NULL_TREE);
7246 TREE_THIS_NOTRAP (new_var) = 1;
7249 else if (maybe_simt)
7251 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7252 ? DECL_VALUE_EXPR (new_var)
7253 : new_var);
7254 if (simtlast == NULL)
7256 simtlast = create_tmp_var (unsigned_type_node);
7257 gcall *g = gimple_build_call_internal
7258 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7259 gimple_call_set_lhs (g, simtlast);
7260 gimple_seq_add_stmt (this_stmt_list, g);
7262 x = build_call_expr_internal_loc
7263 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7264 TREE_TYPE (val), 2, val, simtlast);
7265 new_var = unshare_expr (new_var);
7266 gimplify_assign (new_var, x, this_stmt_list);
7267 new_var = unshare_expr (new_var);
7270 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7271 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7273 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7274 gimple_seq_add_seq (this_stmt_list,
7275 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7276 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7278 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7279 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7281 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7282 gimple_seq_add_seq (this_stmt_list,
7283 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7284 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
7287 x = NULL_TREE;
7288 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7289 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7290 && is_taskloop_ctx (ctx))
7292 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7293 ctx->outer->outer);
7294 if (is_global_var (ovar))
7295 x = ovar;
7297 if (!x)
7298 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7299 if (omp_privatize_by_reference (var))
7300 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7301 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7302 gimplify_and_add (x, this_stmt_list);
7304 if (lab2)
7305 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7308 next:
7309 c = OMP_CLAUSE_CHAIN (c);
7310 if (c == NULL && !par_clauses)
7312 /* If this was a workshare clause, see if it had been combined
7313 with its parallel. In that case, continue looking for the
7314 clauses also on the parallel statement itself. */
7315 if (is_parallel_ctx (ctx))
7316 break;
7318 ctx = ctx->outer;
7319 if (ctx == NULL || !is_parallel_ctx (ctx))
7320 break;
7322 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7323 OMP_CLAUSE_LASTPRIVATE);
7324 par_clauses = true;
7328 if (label)
7329 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7330 gimple_seq_add_seq (stmt_list, post_stmt_list);
7333 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7334 (which might be a placeholder). INNER is true if this is an inner
7335 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7336 join markers. Generate the before-loop forking sequence in
7337 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7338 general form of these sequences is
7340 GOACC_REDUCTION_SETUP
7341 GOACC_FORK
7342 GOACC_REDUCTION_INIT
7344 GOACC_REDUCTION_FINI
7345 GOACC_JOIN
7346 GOACC_REDUCTION_TEARDOWN. */
7348 static void
7349 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7350 gcall *fork, gcall *private_marker, gcall *join,
7351 gimple_seq *fork_seq, gimple_seq *join_seq,
7352 omp_context *ctx)
7354 gimple_seq before_fork = NULL;
7355 gimple_seq after_fork = NULL;
7356 gimple_seq before_join = NULL;
7357 gimple_seq after_join = NULL;
7358 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7359 setup_code = NULL_TREE, teardown_code = NULL_TREE;
7360 unsigned offset = 0;
7362 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7363 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7365 /* No 'reduction' clauses on OpenACC 'kernels'. */
7366 gcc_checking_assert (!is_oacc_kernels (ctx));
7367 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7368 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7370 tree orig = OMP_CLAUSE_DECL (c);
7371 tree var = maybe_lookup_decl (orig, ctx);
7372 tree ref_to_res = NULL_TREE;
7373 tree incoming, outgoing, v1, v2, v3;
7374 bool is_private = false;
7376 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7377 if (rcode == MINUS_EXPR)
7378 rcode = PLUS_EXPR;
7379 else if (rcode == TRUTH_ANDIF_EXPR)
7380 rcode = BIT_AND_EXPR;
7381 else if (rcode == TRUTH_ORIF_EXPR)
7382 rcode = BIT_IOR_EXPR;
7383 tree op = build_int_cst (unsigned_type_node, rcode);
7385 if (!var)
7386 var = orig;
7388 incoming = outgoing = var;
7390 if (!inner)
7392 /* See if an outer construct also reduces this variable. */
7393 omp_context *outer = ctx;
7395 while (omp_context *probe = outer->outer)
7397 enum gimple_code type = gimple_code (probe->stmt);
7398 tree cls;
7400 switch (type)
7402 case GIMPLE_OMP_FOR:
7403 cls = gimple_omp_for_clauses (probe->stmt);
7404 break;
7406 case GIMPLE_OMP_TARGET:
7407 /* No 'reduction' clauses inside OpenACC 'kernels'
7408 regions. */
7409 gcc_checking_assert (!is_oacc_kernels (probe));
7411 if (!is_gimple_omp_offloaded (probe->stmt))
7412 goto do_lookup;
7414 cls = gimple_omp_target_clauses (probe->stmt);
7415 break;
7417 default:
7418 goto do_lookup;
7421 outer = probe;
7422 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7423 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7424 && orig == OMP_CLAUSE_DECL (cls))
7426 incoming = outgoing = lookup_decl (orig, probe);
7427 goto has_outer_reduction;
7429 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7430 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7431 && orig == OMP_CLAUSE_DECL (cls))
7433 is_private = true;
7434 goto do_lookup;
7438 do_lookup:
7439 /* This is the outermost construct with this reduction,
7440 see if there's a mapping for it. */
7441 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7442 && maybe_lookup_field (orig, outer) && !is_private)
7444 ref_to_res = build_receiver_ref (orig, false, outer);
7445 if (omp_privatize_by_reference (orig))
7446 ref_to_res = build_simple_mem_ref (ref_to_res);
7448 tree type = TREE_TYPE (var);
7449 if (POINTER_TYPE_P (type))
7450 type = TREE_TYPE (type);
7452 outgoing = var;
7453 incoming = omp_reduction_init_op (loc, rcode, type);
7455 else
7457 /* Try to look at enclosing contexts for reduction var,
7458 use original if no mapping found. */
7459 tree t = NULL_TREE;
7460 omp_context *c = ctx->outer;
7461 while (c && !t)
7463 t = maybe_lookup_decl (orig, c);
7464 c = c->outer;
7466 incoming = outgoing = (t ? t : orig);
7469 has_outer_reduction:;
7472 if (!ref_to_res)
7473 ref_to_res = integer_zero_node;
7475 if (omp_privatize_by_reference (orig))
7477 tree type = TREE_TYPE (var);
7478 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7480 if (!inner)
7482 tree x = create_tmp_var (TREE_TYPE (type), id);
7483 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7486 v1 = create_tmp_var (type, id);
7487 v2 = create_tmp_var (type, id);
7488 v3 = create_tmp_var (type, id);
7490 gimplify_assign (v1, var, fork_seq);
7491 gimplify_assign (v2, var, fork_seq);
7492 gimplify_assign (v3, var, fork_seq);
7494 var = build_simple_mem_ref (var);
7495 v1 = build_simple_mem_ref (v1);
7496 v2 = build_simple_mem_ref (v2);
7497 v3 = build_simple_mem_ref (v3);
7498 outgoing = build_simple_mem_ref (outgoing);
7500 if (!TREE_CONSTANT (incoming))
7501 incoming = build_simple_mem_ref (incoming);
7503 else
7504 v1 = v2 = v3 = var;
7506 /* Determine position in reduction buffer, which may be used
7507 by target. The parser has ensured that this is not a
7508 variable-sized type. */
7509 fixed_size_mode mode
7510 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7511 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7512 offset = (offset + align - 1) & ~(align - 1);
7513 tree off = build_int_cst (sizetype, offset);
7514 offset += GET_MODE_SIZE (mode);
7516 if (!init_code)
7518 init_code = build_int_cst (integer_type_node,
7519 IFN_GOACC_REDUCTION_INIT);
7520 fini_code = build_int_cst (integer_type_node,
7521 IFN_GOACC_REDUCTION_FINI);
7522 setup_code = build_int_cst (integer_type_node,
7523 IFN_GOACC_REDUCTION_SETUP);
7524 teardown_code = build_int_cst (integer_type_node,
7525 IFN_GOACC_REDUCTION_TEARDOWN);
7528 tree setup_call
7529 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7530 TREE_TYPE (var), 6, setup_code,
7531 unshare_expr (ref_to_res),
7532 incoming, level, op, off);
7533 tree init_call
7534 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7535 TREE_TYPE (var), 6, init_code,
7536 unshare_expr (ref_to_res),
7537 v1, level, op, off);
7538 tree fini_call
7539 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7540 TREE_TYPE (var), 6, fini_code,
7541 unshare_expr (ref_to_res),
7542 v2, level, op, off);
7543 tree teardown_call
7544 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7545 TREE_TYPE (var), 6, teardown_code,
7546 ref_to_res, v3, level, op, off);
7548 gimplify_assign (v1, setup_call, &before_fork);
7549 gimplify_assign (v2, init_call, &after_fork);
7550 gimplify_assign (v3, fini_call, &before_join);
7551 gimplify_assign (outgoing, teardown_call, &after_join);
7554 /* Now stitch things together. */
7555 gimple_seq_add_seq (fork_seq, before_fork);
7556 if (private_marker)
7557 gimple_seq_add_stmt (fork_seq, private_marker);
7558 if (fork)
7559 gimple_seq_add_stmt (fork_seq, fork);
7560 gimple_seq_add_seq (fork_seq, after_fork);
7562 gimple_seq_add_seq (join_seq, before_join);
7563 if (join)
7564 gimple_seq_add_stmt (join_seq, join);
7565 gimple_seq_add_seq (join_seq, after_join);
7568 /* Generate code to implement the REDUCTION clauses, append it
7569 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7570 that should be emitted also inside of the critical section,
7571 in that case clear *CLIST afterwards, otherwise leave it as is
7572 and let the caller emit it itself. */
7574 static void
7575 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7576 gimple_seq *clist, omp_context *ctx)
7578 gimple_seq sub_seq = NULL;
7579 gimple *stmt;
7580 tree x, c;
7581 int count = 0;
7583 /* OpenACC loop reductions are handled elsewhere. */
7584 if (is_gimple_omp_oacc (ctx->stmt))
7585 return;
7587 /* SIMD reductions are handled in lower_rec_input_clauses. */
7588 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7589 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7590 return;
7592 /* inscan reductions are handled elsewhere. */
7593 if (ctx->scan_inclusive || ctx->scan_exclusive)
7594 return;
7596 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7597 update in that case, otherwise use a lock. */
7598 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7599 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7600 && !OMP_CLAUSE_REDUCTION_TASK (c))
7602 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7603 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7605 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7606 count = -1;
7607 break;
7609 count++;
7612 if (count == 0)
7613 return;
7615 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7617 tree var, ref, new_var, orig_var;
7618 enum tree_code code;
7619 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7621 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7622 || OMP_CLAUSE_REDUCTION_TASK (c))
7623 continue;
7625 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7626 orig_var = var = OMP_CLAUSE_DECL (c);
7627 if (TREE_CODE (var) == MEM_REF)
7629 var = TREE_OPERAND (var, 0);
7630 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7631 var = TREE_OPERAND (var, 0);
7632 if (TREE_CODE (var) == ADDR_EXPR)
7633 var = TREE_OPERAND (var, 0);
7634 else
7636 /* If this is a pointer or referenced based array
7637 section, the var could be private in the outer
7638 context e.g. on orphaned loop construct. Pretend this
7639 is private variable's outer reference. */
7640 ccode = OMP_CLAUSE_PRIVATE;
7641 if (TREE_CODE (var) == INDIRECT_REF)
7642 var = TREE_OPERAND (var, 0);
7644 orig_var = var;
7645 if (is_variable_sized (var))
7647 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7648 var = DECL_VALUE_EXPR (var);
7649 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7650 var = TREE_OPERAND (var, 0);
7651 gcc_assert (DECL_P (var));
7654 new_var = lookup_decl (var, ctx);
7655 if (var == OMP_CLAUSE_DECL (c)
7656 && omp_privatize_by_reference (var))
7657 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7658 ref = build_outer_var_ref (var, ctx, ccode);
7659 code = OMP_CLAUSE_REDUCTION_CODE (c);
7661 /* reduction(-:var) sums up the partial results, so it acts
7662 identically to reduction(+:var). */
7663 if (code == MINUS_EXPR)
7664 code = PLUS_EXPR;
7666 bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
7667 if (count == 1)
7669 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7671 addr = save_expr (addr);
7672 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7673 tree new_var2 = new_var;
7674 tree ref2 = ref;
7675 if (is_truth_op)
7677 tree zero = build_zero_cst (TREE_TYPE (new_var));
7678 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7679 boolean_type_node, new_var, zero);
7680 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7681 ref, zero);
7683 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7684 new_var2);
7685 if (is_truth_op)
7686 x = fold_convert (TREE_TYPE (new_var), x);
7687 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7688 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7689 gimplify_and_add (x, stmt_seqp);
7690 return;
7692 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7694 tree d = OMP_CLAUSE_DECL (c);
7695 tree type = TREE_TYPE (d);
7696 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7697 tree i = create_tmp_var (TREE_TYPE (v));
7698 tree ptype = build_pointer_type (TREE_TYPE (type));
7699 tree bias = TREE_OPERAND (d, 1);
7700 d = TREE_OPERAND (d, 0);
7701 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7703 tree b = TREE_OPERAND (d, 1);
7704 b = maybe_lookup_decl (b, ctx);
7705 if (b == NULL)
7707 b = TREE_OPERAND (d, 1);
7708 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7710 if (integer_zerop (bias))
7711 bias = b;
7712 else
7714 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7715 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7716 TREE_TYPE (b), b, bias);
7718 d = TREE_OPERAND (d, 0);
7720 /* For ref build_outer_var_ref already performs this, so
7721 only new_var needs a dereference. */
7722 if (TREE_CODE (d) == INDIRECT_REF)
7724 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7725 gcc_assert (omp_privatize_by_reference (var)
7726 && var == orig_var);
7728 else if (TREE_CODE (d) == ADDR_EXPR)
7730 if (orig_var == var)
7732 new_var = build_fold_addr_expr (new_var);
7733 ref = build_fold_addr_expr (ref);
7736 else
7738 gcc_assert (orig_var == var);
7739 if (omp_privatize_by_reference (var))
7740 ref = build_fold_addr_expr (ref);
7742 if (DECL_P (v))
7744 tree t = maybe_lookup_decl (v, ctx);
7745 if (t)
7746 v = t;
7747 else
7748 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7749 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7751 if (!integer_zerop (bias))
7753 bias = fold_convert_loc (clause_loc, sizetype, bias);
7754 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7755 TREE_TYPE (new_var), new_var,
7756 unshare_expr (bias));
7757 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7758 TREE_TYPE (ref), ref, bias);
7760 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7761 ref = fold_convert_loc (clause_loc, ptype, ref);
7762 tree m = create_tmp_var (ptype);
7763 gimplify_assign (m, new_var, stmt_seqp);
7764 new_var = m;
7765 m = create_tmp_var (ptype);
7766 gimplify_assign (m, ref, stmt_seqp);
7767 ref = m;
7768 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7769 tree body = create_artificial_label (UNKNOWN_LOCATION);
7770 tree end = create_artificial_label (UNKNOWN_LOCATION);
7771 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7772 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7773 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7774 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7776 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7777 tree decl_placeholder
7778 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7779 SET_DECL_VALUE_EXPR (placeholder, out);
7780 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7781 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7782 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7783 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7784 gimple_seq_add_seq (&sub_seq,
7785 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7786 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7787 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7788 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7790 else
7792 tree out2 = out;
7793 tree priv2 = priv;
7794 if (is_truth_op)
7796 tree zero = build_zero_cst (TREE_TYPE (out));
7797 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7798 boolean_type_node, out, zero);
7799 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7800 boolean_type_node, priv, zero);
7802 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7803 if (is_truth_op)
7804 x = fold_convert (TREE_TYPE (out), x);
7805 out = unshare_expr (out);
7806 gimplify_assign (out, x, &sub_seq);
7808 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7809 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7810 gimple_seq_add_stmt (&sub_seq, g);
7811 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7812 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7813 gimple_seq_add_stmt (&sub_seq, g);
7814 g = gimple_build_assign (i, PLUS_EXPR, i,
7815 build_int_cst (TREE_TYPE (i), 1));
7816 gimple_seq_add_stmt (&sub_seq, g);
7817 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7818 gimple_seq_add_stmt (&sub_seq, g);
7819 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7821 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7823 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7825 if (omp_privatize_by_reference (var)
7826 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7827 TREE_TYPE (ref)))
7828 ref = build_fold_addr_expr_loc (clause_loc, ref);
7829 SET_DECL_VALUE_EXPR (placeholder, ref);
7830 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7831 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7832 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7833 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7834 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7836 else
7838 tree new_var2 = new_var;
7839 tree ref2 = ref;
7840 if (is_truth_op)
7842 tree zero = build_zero_cst (TREE_TYPE (new_var));
7843 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7844 boolean_type_node, new_var, zero);
7845 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7846 ref, zero);
7848 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7849 if (is_truth_op)
7850 x = fold_convert (TREE_TYPE (new_var), x);
7851 ref = build_outer_var_ref (var, ctx);
7852 gimplify_assign (ref, x, &sub_seq);
7856 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7858 gimple_seq_add_stmt (stmt_seqp, stmt);
7860 gimple_seq_add_seq (stmt_seqp, sub_seq);
7862 if (clist)
7864 gimple_seq_add_seq (stmt_seqp, *clist);
7865 *clist = NULL;
7868 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7870 gimple_seq_add_stmt (stmt_seqp, stmt);
7874 /* Generate code to implement the COPYPRIVATE clauses. */
7876 static void
7877 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7878 omp_context *ctx)
7880 tree c;
7882 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7884 tree var, new_var, ref, x;
7885 bool by_ref;
7886 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7888 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7889 continue;
7891 var = OMP_CLAUSE_DECL (c);
7892 by_ref = use_pointer_for_field (var, NULL);
7894 ref = build_sender_ref (var, ctx);
7895 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7896 if (by_ref)
7898 x = build_fold_addr_expr_loc (clause_loc, new_var);
7899 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7901 gimplify_assign (ref, x, slist);
7903 ref = build_receiver_ref (var, false, ctx);
7904 if (by_ref)
7906 ref = fold_convert_loc (clause_loc,
7907 build_pointer_type (TREE_TYPE (new_var)),
7908 ref);
7909 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7911 if (omp_privatize_by_reference (var))
7913 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7914 ref = build_simple_mem_ref_loc (clause_loc, ref);
7915 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7917 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7918 gimplify_and_add (x, rlist);
7923 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7924 and REDUCTION from the sender (aka parent) side. */
7926 static void
7927 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7928 omp_context *ctx)
7930 tree c, t;
7931 int ignored_looptemp = 0;
7932 bool is_taskloop = false;
7934 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7935 by GOMP_taskloop. */
7936 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7938 ignored_looptemp = 2;
7939 is_taskloop = true;
7942 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7944 tree val, ref, x, var;
7945 bool by_ref, do_in = false, do_out = false;
7946 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7948 switch (OMP_CLAUSE_CODE (c))
7950 case OMP_CLAUSE_PRIVATE:
7951 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7952 break;
7953 continue;
7954 case OMP_CLAUSE_FIRSTPRIVATE:
7955 case OMP_CLAUSE_COPYIN:
7956 case OMP_CLAUSE_LASTPRIVATE:
7957 case OMP_CLAUSE_IN_REDUCTION:
7958 case OMP_CLAUSE__REDUCTEMP_:
7959 break;
7960 case OMP_CLAUSE_REDUCTION:
7961 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7962 continue;
7963 break;
7964 case OMP_CLAUSE_SHARED:
7965 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7966 break;
7967 continue;
7968 case OMP_CLAUSE__LOOPTEMP_:
7969 if (ignored_looptemp)
7971 ignored_looptemp--;
7972 continue;
7974 break;
7975 default:
7976 continue;
7979 val = OMP_CLAUSE_DECL (c);
7980 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7981 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7982 && TREE_CODE (val) == MEM_REF)
7984 val = TREE_OPERAND (val, 0);
7985 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7986 val = TREE_OPERAND (val, 0);
7987 if (TREE_CODE (val) == INDIRECT_REF
7988 || TREE_CODE (val) == ADDR_EXPR)
7989 val = TREE_OPERAND (val, 0);
7990 if (is_variable_sized (val))
7991 continue;
7994 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7995 outer taskloop region. */
7996 omp_context *ctx_for_o = ctx;
7997 if (is_taskloop
7998 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7999 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8000 ctx_for_o = ctx->outer;
8002 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
8004 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
8005 && is_global_var (var)
8006 && (val == OMP_CLAUSE_DECL (c)
8007 || !is_task_ctx (ctx)
8008 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
8009 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
8010 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
8011 != POINTER_TYPE)))))
8012 continue;
8014 t = omp_member_access_dummy_var (var);
8015 if (t)
8017 var = DECL_VALUE_EXPR (var);
8018 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
8019 if (o != t)
8020 var = unshare_and_remap (var, t, o);
8021 else
8022 var = unshare_expr (var);
8025 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
8027 /* Handle taskloop firstprivate/lastprivate, where the
8028 lastprivate on GIMPLE_OMP_TASK is represented as
8029 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8030 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
8031 x = omp_build_component_ref (ctx->sender_decl, f);
8032 if (use_pointer_for_field (val, ctx))
8033 var = build_fold_addr_expr (var);
8034 gimplify_assign (x, var, ilist);
8035 DECL_ABSTRACT_ORIGIN (f) = NULL;
8036 continue;
8039 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8040 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
8041 || val == OMP_CLAUSE_DECL (c))
8042 && is_variable_sized (val))
8043 continue;
8044 by_ref = use_pointer_for_field (val, NULL);
8046 switch (OMP_CLAUSE_CODE (c))
8048 case OMP_CLAUSE_FIRSTPRIVATE:
8049 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
8050 && !by_ref
8051 && is_task_ctx (ctx))
8052 suppress_warning (var);
8053 do_in = true;
8054 break;
8056 case OMP_CLAUSE_PRIVATE:
8057 case OMP_CLAUSE_COPYIN:
8058 case OMP_CLAUSE__LOOPTEMP_:
8059 case OMP_CLAUSE__REDUCTEMP_:
8060 do_in = true;
8061 break;
8063 case OMP_CLAUSE_LASTPRIVATE:
8064 if (by_ref || omp_privatize_by_reference (val))
8066 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8067 continue;
8068 do_in = true;
8070 else
8072 do_out = true;
8073 if (lang_hooks.decls.omp_private_outer_ref (val))
8074 do_in = true;
8076 break;
8078 case OMP_CLAUSE_REDUCTION:
8079 case OMP_CLAUSE_IN_REDUCTION:
8080 do_in = true;
8081 if (val == OMP_CLAUSE_DECL (c))
8083 if (is_task_ctx (ctx))
8084 by_ref = use_pointer_for_field (val, ctx);
8085 else
8086 do_out = !(by_ref || omp_privatize_by_reference (val));
8088 else
8089 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
8090 break;
8092 default:
8093 gcc_unreachable ();
8096 if (do_in)
8098 ref = build_sender_ref (val, ctx);
8099 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
8100 gimplify_assign (ref, x, ilist);
8101 if (is_task_ctx (ctx))
8102 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
8105 if (do_out)
8107 ref = build_sender_ref (val, ctx);
8108 gimplify_assign (var, ref, olist);
8113 /* Generate code to implement SHARED from the sender (aka parent)
8114 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8115 list things that got automatically shared. */
8117 static void
8118 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
8120 tree var, ovar, nvar, t, f, x, record_type;
8122 if (ctx->record_type == NULL)
8123 return;
8125 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
8126 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8128 ovar = DECL_ABSTRACT_ORIGIN (f);
8129 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
8130 continue;
8132 nvar = maybe_lookup_decl (ovar, ctx);
8133 if (!nvar
8134 || !DECL_HAS_VALUE_EXPR_P (nvar)
8135 || (ctx->allocate_map
8136 && ctx->allocate_map->get (ovar)))
8137 continue;
8139 /* If CTX is a nested parallel directive. Find the immediately
8140 enclosing parallel or workshare construct that contains a
8141 mapping for OVAR. */
8142 var = lookup_decl_in_outer_ctx (ovar, ctx);
8144 t = omp_member_access_dummy_var (var);
8145 if (t)
8147 var = DECL_VALUE_EXPR (var);
8148 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
8149 if (o != t)
8150 var = unshare_and_remap (var, t, o);
8151 else
8152 var = unshare_expr (var);
8155 if (use_pointer_for_field (ovar, ctx))
8157 x = build_sender_ref (ovar, ctx);
8158 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
8159 && TREE_TYPE (f) == TREE_TYPE (ovar))
8161 gcc_assert (is_parallel_ctx (ctx)
8162 && DECL_ARTIFICIAL (ovar));
8163 /* _condtemp_ clause. */
8164 var = build_constructor (TREE_TYPE (x), NULL);
8166 else
8167 var = build_fold_addr_expr (var);
8168 gimplify_assign (x, var, ilist);
8170 else
8172 x = build_sender_ref (ovar, ctx);
8173 gimplify_assign (x, var, ilist);
8175 if (!TREE_READONLY (var)
8176 /* We don't need to receive a new reference to a result
8177 or parm decl. In fact we may not store to it as we will
8178 invalidate any pending RSO and generate wrong gimple
8179 during inlining. */
8180 && !((TREE_CODE (var) == RESULT_DECL
8181 || TREE_CODE (var) == PARM_DECL)
8182 && DECL_BY_REFERENCE (var)))
8184 x = build_sender_ref (ovar, ctx);
8185 gimplify_assign (var, x, olist);
8191 /* Emit an OpenACC head marker call, encapulating the partitioning and
8192 other information that must be processed by the target compiler.
8193 Return the maximum number of dimensions the associated loop might
8194 be partitioned over. */
8196 static unsigned
8197 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
8198 gimple_seq *seq, omp_context *ctx)
8200 unsigned levels = 0;
8201 unsigned tag = 0;
8202 tree gang_static = NULL_TREE;
8203 auto_vec<tree, 5> args;
8205 args.quick_push (build_int_cst
8206 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
8207 args.quick_push (ddvar);
8208 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8210 switch (OMP_CLAUSE_CODE (c))
8212 case OMP_CLAUSE_GANG:
8213 tag |= OLF_DIM_GANG;
8214 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
8215 /* static:* is represented by -1, and we can ignore it, as
8216 scheduling is always static. */
8217 if (gang_static && integer_minus_onep (gang_static))
8218 gang_static = NULL_TREE;
8219 levels++;
8220 break;
8222 case OMP_CLAUSE_WORKER:
8223 tag |= OLF_DIM_WORKER;
8224 levels++;
8225 break;
8227 case OMP_CLAUSE_VECTOR:
8228 tag |= OLF_DIM_VECTOR;
8229 levels++;
8230 break;
8232 case OMP_CLAUSE_SEQ:
8233 tag |= OLF_SEQ;
8234 break;
8236 case OMP_CLAUSE_AUTO:
8237 tag |= OLF_AUTO;
8238 break;
8240 case OMP_CLAUSE_INDEPENDENT:
8241 tag |= OLF_INDEPENDENT;
8242 break;
8244 case OMP_CLAUSE_TILE:
8245 tag |= OLF_TILE;
8246 break;
8248 default:
8249 continue;
8253 if (gang_static)
8255 if (DECL_P (gang_static))
8256 gang_static = build_outer_var_ref (gang_static, ctx);
8257 tag |= OLF_GANG_STATIC;
8260 omp_context *tgt = enclosing_target_ctx (ctx);
8261 if (!tgt || is_oacc_parallel_or_serial (tgt))
8263 else if (is_oacc_kernels (tgt))
8264 /* Not using this loops handling inside OpenACC 'kernels' regions. */
8265 gcc_unreachable ();
8266 else if (is_oacc_kernels_decomposed_part (tgt))
8268 else
8269 gcc_unreachable ();
8271 /* In a parallel region, loops are implicitly INDEPENDENT. */
8272 if (!tgt || is_oacc_parallel_or_serial (tgt))
8273 tag |= OLF_INDEPENDENT;
8275 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8276 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8277 if (tgt && is_oacc_kernels_decomposed_part (tgt))
8279 gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
8280 gcc_assert (!(tag & OLF_AUTO));
8283 if (tag & OLF_TILE)
8284 /* Tiling could use all 3 levels. */
8285 levels = 3;
8286 else
8288 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8289 Ensure at least one level, or 2 for possible auto
8290 partitioning */
8291 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
8292 << OLF_DIM_BASE) | OLF_SEQ));
8294 if (levels < 1u + maybe_auto)
8295 levels = 1u + maybe_auto;
8298 args.quick_push (build_int_cst (integer_type_node, levels));
8299 args.quick_push (build_int_cst (integer_type_node, tag));
8300 if (gang_static)
8301 args.quick_push (gang_static);
8303 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
8304 gimple_set_location (call, loc);
8305 gimple_set_lhs (call, ddvar);
8306 gimple_seq_add_stmt (seq, call);
8308 return levels;
8311 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
8312 partitioning level of the enclosed region. */
8314 static void
8315 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8316 tree tofollow, gimple_seq *seq)
8318 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8319 : IFN_UNIQUE_OACC_TAIL_MARK);
8320 tree marker = build_int_cst (integer_type_node, marker_kind);
8321 int nargs = 2 + (tofollow != NULL_TREE);
8322 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8323 marker, ddvar, tofollow);
8324 gimple_set_location (call, loc);
8325 gimple_set_lhs (call, ddvar);
8326 gimple_seq_add_stmt (seq, call);
8329 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8330 the loop clauses, from which we extract reductions. Initialize
8331 HEAD and TAIL. */
8333 static void
8334 lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
8335 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
8337 bool inner = false;
8338 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
8339 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
8341 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
8343 if (private_marker)
8345 gimple_set_location (private_marker, loc);
8346 gimple_call_set_lhs (private_marker, ddvar);
8347 gimple_call_set_arg (private_marker, 1, ddvar);
8350 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
8351 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
8353 gcc_assert (count);
8354 for (unsigned done = 1; count; count--, done++)
8356 gimple_seq fork_seq = NULL;
8357 gimple_seq join_seq = NULL;
8359 tree place = build_int_cst (integer_type_node, -1);
8360 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
8361 fork_kind, ddvar, place);
8362 gimple_set_location (fork, loc);
8363 gimple_set_lhs (fork, ddvar);
8365 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
8366 join_kind, ddvar, place);
8367 gimple_set_location (join, loc);
8368 gimple_set_lhs (join, ddvar);
8370 /* Mark the beginning of this level sequence. */
8371 if (inner)
8372 lower_oacc_loop_marker (loc, ddvar, true,
8373 build_int_cst (integer_type_node, count),
8374 &fork_seq);
8375 lower_oacc_loop_marker (loc, ddvar, false,
8376 build_int_cst (integer_type_node, done),
8377 &join_seq);
8379 lower_oacc_reductions (loc, clauses, place, inner,
8380 fork, (count == 1) ? private_marker : NULL,
8381 join, &fork_seq, &join_seq, ctx);
8383 /* Append this level to head. */
8384 gimple_seq_add_seq (head, fork_seq);
8385 /* Prepend it to tail. */
8386 gimple_seq_add_seq (&join_seq, *tail);
8387 *tail = join_seq;
8389 inner = true;
8392 /* Mark the end of the sequence. */
8393 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
8394 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
8397 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8398 catch handler and return it. This prevents programs from violating the
8399 structured block semantics with throws. */
8401 static gimple_seq
8402 maybe_catch_exception (gimple_seq body)
8404 gimple *g;
8405 tree decl;
8407 if (!flag_exceptions)
8408 return body;
8410 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8411 decl = lang_hooks.eh_protect_cleanup_actions ();
8412 else
8413 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8415 g = gimple_build_eh_must_not_throw (decl);
8416 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8417 GIMPLE_TRY_CATCH);
8419 return gimple_seq_alloc_with_stmt (g);
8423 /* Routines to lower OMP directives into OMP-GIMPLE. */
8425 /* If ctx is a worksharing context inside of a cancellable parallel
8426 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8427 and conditional branch to parallel's cancel_label to handle
8428 cancellation in the implicit barrier. */
8430 static void
8431 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8432 gimple_seq *body)
8434 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8435 if (gimple_omp_return_nowait_p (omp_return))
8436 return;
8437 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8438 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8439 && outer->cancellable)
8441 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8442 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8443 tree lhs = create_tmp_var (c_bool_type);
8444 gimple_omp_return_set_lhs (omp_return, lhs);
8445 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8446 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8447 fold_convert (c_bool_type,
8448 boolean_false_node),
8449 outer->cancel_label, fallthru_label);
8450 gimple_seq_add_stmt (body, g);
8451 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8453 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
8454 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
8455 return;
8458 /* Find the first task_reduction or reduction clause or return NULL
8459 if there are none. */
8461 static inline tree
8462 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8463 enum omp_clause_code ccode)
8465 while (1)
8467 clauses = omp_find_clause (clauses, ccode);
8468 if (clauses == NULL_TREE)
8469 return NULL_TREE;
8470 if (ccode != OMP_CLAUSE_REDUCTION
8471 || code == OMP_TASKLOOP
8472 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8473 return clauses;
8474 clauses = OMP_CLAUSE_CHAIN (clauses);
8478 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8479 gimple_seq *, gimple_seq *);
8481 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8482 CTX is the enclosing OMP context for the current statement. */
8484 static void
8485 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8487 tree block, control;
8488 gimple_stmt_iterator tgsi;
8489 gomp_sections *stmt;
8490 gimple *t;
8491 gbind *new_stmt, *bind;
8492 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8494 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
8496 push_gimplify_context ();
8498 dlist = NULL;
8499 ilist = NULL;
8501 tree rclauses
8502 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
8503 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
8504 tree rtmp = NULL_TREE;
8505 if (rclauses)
8507 tree type = build_pointer_type (pointer_sized_int_node);
8508 tree temp = create_tmp_var (type);
8509 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8510 OMP_CLAUSE_DECL (c) = temp;
8511 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
8512 gimple_omp_sections_set_clauses (stmt, c);
8513 lower_omp_task_reductions (ctx, OMP_SECTIONS,
8514 gimple_omp_sections_clauses (stmt),
8515 &ilist, &tred_dlist);
8516 rclauses = c;
8517 rtmp = make_ssa_name (type);
8518 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
8521 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
8522 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
8524 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
8525 &ilist, &dlist, ctx, NULL);
8527 control = create_tmp_var (unsigned_type_node, ".section");
8528 gimple_omp_sections_set_control (stmt, control);
8530 new_body = gimple_omp_body (stmt);
8531 gimple_omp_set_body (stmt, NULL);
8532 tgsi = gsi_start (new_body);
8533 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
8535 omp_context *sctx;
8536 gimple *sec_start;
8538 sec_start = gsi_stmt (tgsi);
8539 sctx = maybe_lookup_ctx (sec_start);
8540 gcc_assert (sctx);
8542 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8543 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8544 GSI_CONTINUE_LINKING);
8545 gimple_omp_set_body (sec_start, NULL);
8547 if (gsi_one_before_end_p (tgsi))
8549 gimple_seq l = NULL;
8550 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8551 &ilist, &l, &clist, ctx);
8552 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8553 gimple_omp_section_set_last (sec_start);
8556 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8557 GSI_CONTINUE_LINKING);
8560 block = make_node (BLOCK);
8561 bind = gimple_build_bind (NULL, new_body, block);
8563 olist = NULL;
8564 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8565 &clist, ctx);
8566 if (clist)
8568 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8569 gcall *g = gimple_build_call (fndecl, 0);
8570 gimple_seq_add_stmt (&olist, g);
8571 gimple_seq_add_seq (&olist, clist);
8572 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8573 g = gimple_build_call (fndecl, 0);
8574 gimple_seq_add_stmt (&olist, g);
8577 block = make_node (BLOCK);
8578 new_stmt = gimple_build_bind (NULL, NULL, block);
8579 gsi_replace (gsi_p, new_stmt, true);
8581 pop_gimplify_context (new_stmt);
8582 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8583 BLOCK_VARS (block) = gimple_bind_vars (bind);
8584 if (BLOCK_VARS (block))
8585 TREE_USED (block) = 1;
8587 new_body = NULL;
8588 gimple_seq_add_seq (&new_body, ilist);
8589 gimple_seq_add_stmt (&new_body, stmt);
8590 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8591 gimple_seq_add_stmt (&new_body, bind);
8593 t = gimple_build_omp_continue (control, control);
8594 gimple_seq_add_stmt (&new_body, t);
8596 gimple_seq_add_seq (&new_body, olist);
8597 if (ctx->cancellable)
8598 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8599 gimple_seq_add_seq (&new_body, dlist);
8601 new_body = maybe_catch_exception (new_body);
8603 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8604 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8605 t = gimple_build_omp_return (nowait);
8606 gimple_seq_add_stmt (&new_body, t);
8607 gimple_seq_add_seq (&new_body, tred_dlist);
8608 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
8610 if (rclauses)
8611 OMP_CLAUSE_DECL (rclauses) = rtmp;
8613 gimple_bind_set_body (new_stmt, new_body);
8617 /* A subroutine of lower_omp_single. Expand the simple form of
8618 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8620 if (GOMP_single_start ())
8621 BODY;
8622 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8624 FIXME. It may be better to delay expanding the logic of this until
8625 pass_expand_omp. The expanded logic may make the job more difficult
8626 to a synchronization analysis pass. */
8628 static void
8629 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8631 location_t loc = gimple_location (single_stmt);
8632 tree tlabel = create_artificial_label (loc);
8633 tree flabel = create_artificial_label (loc);
8634 gimple *call, *cond;
8635 tree lhs, decl;
8637 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8638 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8639 call = gimple_build_call (decl, 0);
8640 gimple_call_set_lhs (call, lhs);
8641 gimple_seq_add_stmt (pre_p, call);
8643 cond = gimple_build_cond (EQ_EXPR, lhs,
8644 fold_convert_loc (loc, TREE_TYPE (lhs),
8645 boolean_true_node),
8646 tlabel, flabel);
8647 gimple_seq_add_stmt (pre_p, cond);
8648 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8649 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8650 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8654 /* A subroutine of lower_omp_single. Expand the simple form of
8655 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8657 #pragma omp single copyprivate (a, b, c)
8659 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8662 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8664 BODY;
8665 copyout.a = a;
8666 copyout.b = b;
8667 copyout.c = c;
8668 GOMP_single_copy_end (&copyout);
8670 else
8672 a = copyout_p->a;
8673 b = copyout_p->b;
8674 c = copyout_p->c;
8676 GOMP_barrier ();
8679 FIXME. It may be better to delay expanding the logic of this until
8680 pass_expand_omp. The expanded logic may make the job more difficult
8681 to a synchronization analysis pass. */
8683 static void
8684 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8685 omp_context *ctx)
8687 tree ptr_type, t, l0, l1, l2, bfn_decl;
8688 gimple_seq copyin_seq;
8689 location_t loc = gimple_location (single_stmt);
8691 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8693 ptr_type = build_pointer_type (ctx->record_type);
8694 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8696 l0 = create_artificial_label (loc);
8697 l1 = create_artificial_label (loc);
8698 l2 = create_artificial_label (loc);
8700 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8701 t = build_call_expr_loc (loc, bfn_decl, 0);
8702 t = fold_convert_loc (loc, ptr_type, t);
8703 gimplify_assign (ctx->receiver_decl, t, pre_p);
8705 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8706 build_int_cst (ptr_type, 0));
8707 t = build3 (COND_EXPR, void_type_node, t,
8708 build_and_jump (&l0), build_and_jump (&l1));
8709 gimplify_and_add (t, pre_p);
8711 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8713 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8715 copyin_seq = NULL;
8716 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8717 &copyin_seq, ctx);
8719 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8720 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8721 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8722 gimplify_and_add (t, pre_p);
8724 t = build_and_jump (&l2);
8725 gimplify_and_add (t, pre_p);
8727 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8729 gimple_seq_add_seq (pre_p, copyin_seq);
8731 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8735 /* Expand code for an OpenMP single directive. */
8737 static void
8738 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8740 tree block;
8741 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8742 gbind *bind;
8743 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8745 push_gimplify_context ();
8747 block = make_node (BLOCK);
8748 bind = gimple_build_bind (NULL, NULL, block);
8749 gsi_replace (gsi_p, bind, true);
8750 bind_body = NULL;
8751 dlist = NULL;
8752 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8753 &bind_body, &dlist, ctx, NULL);
8754 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8756 gimple_seq_add_stmt (&bind_body, single_stmt);
8758 if (ctx->record_type)
8759 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8760 else
8761 lower_omp_single_simple (single_stmt, &bind_body);
8763 gimple_omp_set_body (single_stmt, NULL);
8765 gimple_seq_add_seq (&bind_body, dlist);
8767 bind_body = maybe_catch_exception (bind_body);
8769 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8770 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8771 gimple *g = gimple_build_omp_return (nowait);
8772 gimple_seq_add_stmt (&bind_body_tail, g);
8773 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8774 if (ctx->record_type)
8776 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8777 tree clobber = build_clobber (ctx->record_type);
8778 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8779 clobber), GSI_SAME_STMT);
8781 gimple_seq_add_seq (&bind_body, bind_body_tail);
8782 gimple_bind_set_body (bind, bind_body);
8784 pop_gimplify_context (bind);
8786 gimple_bind_append_vars (bind, ctx->block_vars);
8787 BLOCK_VARS (block) = ctx->block_vars;
8788 if (BLOCK_VARS (block))
8789 TREE_USED (block) = 1;
8793 /* Lower code for an OMP scope directive. */
8795 static void
8796 lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8798 tree block;
8799 gimple *scope_stmt = gsi_stmt (*gsi_p);
8800 gbind *bind;
8801 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8802 gimple_seq tred_dlist = NULL;
8804 push_gimplify_context ();
8806 block = make_node (BLOCK);
8807 bind = gimple_build_bind (NULL, NULL, block);
8808 gsi_replace (gsi_p, bind, true);
8809 bind_body = NULL;
8810 dlist = NULL;
8812 tree rclauses
8813 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
8814 OMP_SCOPE, OMP_CLAUSE_REDUCTION);
8815 if (rclauses)
8817 tree type = build_pointer_type (pointer_sized_int_node);
8818 tree temp = create_tmp_var (type);
8819 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8820 OMP_CLAUSE_DECL (c) = temp;
8821 OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
8822 gimple_omp_scope_set_clauses (scope_stmt, c);
8823 lower_omp_task_reductions (ctx, OMP_SCOPE,
8824 gimple_omp_scope_clauses (scope_stmt),
8825 &bind_body, &tred_dlist);
8826 rclauses = c;
8827 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
8828 gimple *stmt = gimple_build_call (fndecl, 1, temp);
8829 gimple_seq_add_stmt (&bind_body, stmt);
8832 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
8833 &bind_body, &dlist, ctx, NULL);
8834 lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);
8836 gimple_seq_add_stmt (&bind_body, scope_stmt);
8838 gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));
8840 gimple_omp_set_body (scope_stmt, NULL);
8842 gimple_seq clist = NULL;
8843 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
8844 &bind_body, &clist, ctx);
8845 if (clist)
8847 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8848 gcall *g = gimple_build_call (fndecl, 0);
8849 gimple_seq_add_stmt (&bind_body, g);
8850 gimple_seq_add_seq (&bind_body, clist);
8851 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8852 g = gimple_build_call (fndecl, 0);
8853 gimple_seq_add_stmt (&bind_body, g);
8856 gimple_seq_add_seq (&bind_body, dlist);
8858 bind_body = maybe_catch_exception (bind_body);
8860 bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
8861 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8862 gimple *g = gimple_build_omp_return (nowait);
8863 gimple_seq_add_stmt (&bind_body_tail, g);
8864 gimple_seq_add_seq (&bind_body_tail, tred_dlist);
8865 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8866 if (ctx->record_type)
8868 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8869 tree clobber = build_clobber (ctx->record_type);
8870 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8871 clobber), GSI_SAME_STMT);
8873 gimple_seq_add_seq (&bind_body, bind_body_tail);
8875 gimple_bind_set_body (bind, bind_body);
8877 pop_gimplify_context (bind);
8879 gimple_bind_append_vars (bind, ctx->block_vars);
8880 BLOCK_VARS (block) = ctx->block_vars;
8881 if (BLOCK_VARS (block))
8882 TREE_USED (block) = 1;
8884 /* Expand code for an OpenMP master or masked directive. */
8886 static void
8887 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8889 tree block, lab = NULL, x, bfn_decl;
8890 gimple *stmt = gsi_stmt (*gsi_p);
8891 gbind *bind;
8892 location_t loc = gimple_location (stmt);
8893 gimple_seq tseq;
8894 tree filter = integer_zero_node;
8896 push_gimplify_context ();
8898 if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
8900 filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
8901 OMP_CLAUSE_FILTER);
8902 if (filter)
8903 filter = fold_convert (integer_type_node,
8904 OMP_CLAUSE_FILTER_EXPR (filter));
8905 else
8906 filter = integer_zero_node;
8908 block = make_node (BLOCK);
8909 bind = gimple_build_bind (NULL, NULL, block);
8910 gsi_replace (gsi_p, bind, true);
8911 gimple_bind_add_stmt (bind, stmt);
8913 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8914 x = build_call_expr_loc (loc, bfn_decl, 0);
8915 x = build2 (EQ_EXPR, boolean_type_node, x, filter);
8916 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8917 tseq = NULL;
8918 gimplify_and_add (x, &tseq);
8919 gimple_bind_add_seq (bind, tseq);
8921 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8922 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8923 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8924 gimple_omp_set_body (stmt, NULL);
8926 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8928 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8930 pop_gimplify_context (bind);
8932 gimple_bind_append_vars (bind, ctx->block_vars);
8933 BLOCK_VARS (block) = ctx->block_vars;
8936 /* Helper function for lower_omp_task_reductions. For a specific PASS
8937 find out the current clause it should be processed, or return false
8938 if all have been processed already. */
8940 static inline bool
8941 omp_task_reduction_iterate (int pass, enum tree_code code,
8942 enum omp_clause_code ccode, tree *c, tree *decl,
8943 tree *type, tree *next)
8945 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8947 if (ccode == OMP_CLAUSE_REDUCTION
8948 && code != OMP_TASKLOOP
8949 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8950 continue;
8951 *decl = OMP_CLAUSE_DECL (*c);
8952 *type = TREE_TYPE (*decl);
8953 if (TREE_CODE (*decl) == MEM_REF)
8955 if (pass != 1)
8956 continue;
8958 else
8960 if (omp_privatize_by_reference (*decl))
8961 *type = TREE_TYPE (*type);
8962 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8963 continue;
8965 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8966 return true;
8968 *decl = NULL_TREE;
8969 *type = NULL_TREE;
8970 *next = NULL_TREE;
8971 return false;
8974 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8975 OMP_TASKGROUP only with task modifier). Register mapping of those in
8976 START sequence and reducing them and unregister them in the END sequence. */
8978 static void
8979 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8980 gimple_seq *start, gimple_seq *end)
8982 enum omp_clause_code ccode
8983 = (code == OMP_TASKGROUP
8984 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8985 tree cancellable = NULL_TREE;
8986 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8987 if (clauses == NULL_TREE)
8988 return;
8989 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
8991 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8992 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8993 && outer->cancellable)
8995 cancellable = error_mark_node;
8996 break;
8998 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
8999 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
9000 break;
9002 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
9003 tree *last = &TYPE_FIELDS (record_type);
9004 unsigned cnt = 0;
9005 if (cancellable)
9007 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9008 ptr_type_node);
9009 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9010 integer_type_node);
9011 *last = field;
9012 DECL_CHAIN (field) = ifield;
9013 last = &DECL_CHAIN (ifield);
9014 DECL_CONTEXT (field) = record_type;
9015 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9016 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9017 DECL_CONTEXT (ifield) = record_type;
9018 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
9019 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
9021 for (int pass = 0; pass < 2; pass++)
9023 tree decl, type, next;
9024 for (tree c = clauses;
9025 omp_task_reduction_iterate (pass, code, ccode,
9026 &c, &decl, &type, &next); c = next)
9028 ++cnt;
9029 tree new_type = type;
9030 if (ctx->outer)
9031 new_type = remap_type (type, &ctx->outer->cb);
9032 tree field
9033 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
9034 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
9035 new_type);
9036 if (DECL_P (decl) && type == TREE_TYPE (decl))
9038 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
9039 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
9040 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
9042 else
9043 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
9044 DECL_CONTEXT (field) = record_type;
9045 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9046 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9047 *last = field;
9048 last = &DECL_CHAIN (field);
9049 tree bfield
9050 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
9051 boolean_type_node);
9052 DECL_CONTEXT (bfield) = record_type;
9053 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
9054 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
9055 *last = bfield;
9056 last = &DECL_CHAIN (bfield);
9059 *last = NULL_TREE;
9060 layout_type (record_type);
9062 /* Build up an array which registers with the runtime all the reductions
9063 and deregisters them at the end. Format documented in libgomp/task.c. */
9064 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
9065 tree avar = create_tmp_var_raw (atype);
9066 gimple_add_tmp_var (avar);
9067 TREE_ADDRESSABLE (avar) = 1;
9068 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
9069 NULL_TREE, NULL_TREE);
9070 tree t = build_int_cst (pointer_sized_int_node, cnt);
9071 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9072 gimple_seq seq = NULL;
9073 tree sz = fold_convert (pointer_sized_int_node,
9074 TYPE_SIZE_UNIT (record_type));
9075 int cachesz = 64;
9076 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
9077 build_int_cst (pointer_sized_int_node, cachesz - 1));
9078 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
9079 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
9080 ctx->task_reductions.create (1 + cnt);
9081 ctx->task_reduction_map = new hash_map<tree, unsigned>;
9082 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
9083 ? sz : NULL_TREE);
9084 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
9085 gimple_seq_add_seq (start, seq);
9086 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
9087 NULL_TREE, NULL_TREE);
9088 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
9089 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9090 NULL_TREE, NULL_TREE);
9091 t = build_int_cst (pointer_sized_int_node,
9092 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
9093 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9094 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
9095 NULL_TREE, NULL_TREE);
9096 t = build_int_cst (pointer_sized_int_node, -1);
9097 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9098 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
9099 NULL_TREE, NULL_TREE);
9100 t = build_int_cst (pointer_sized_int_node, 0);
9101 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9103 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
9104 and for each task reduction checks a bool right after the private variable
9105 within that thread's chunk; if the bool is clear, it hasn't been
9106 initialized and thus isn't going to be reduced nor destructed, otherwise
9107 reduce and destruct it. */
9108 tree idx = create_tmp_var (size_type_node);
9109 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
9110 tree num_thr_sz = create_tmp_var (size_type_node);
9111 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9112 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9113 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
9114 gimple *g;
9115 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9117 /* For worksharing constructs or scope, only perform it in the master
9118 thread, with the exception of cancelled implicit barriers - then only
9119 handle the current thread. */
9120 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9121 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9122 tree thr_num = create_tmp_var (integer_type_node);
9123 g = gimple_build_call (t, 0);
9124 gimple_call_set_lhs (g, thr_num);
9125 gimple_seq_add_stmt (end, g);
9126 if (cancellable)
9128 tree c;
9129 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9130 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9131 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9132 if (code == OMP_FOR)
9133 c = gimple_omp_for_clauses (ctx->stmt);
9134 else if (code == OMP_SECTIONS)
9135 c = gimple_omp_sections_clauses (ctx->stmt);
9136 else /* if (code == OMP_SCOPE) */
9137 c = gimple_omp_scope_clauses (ctx->stmt);
9138 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
9139 cancellable = c;
9140 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
9141 lab5, lab6);
9142 gimple_seq_add_stmt (end, g);
9143 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9144 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
9145 gimple_seq_add_stmt (end, g);
9146 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
9147 build_one_cst (TREE_TYPE (idx)));
9148 gimple_seq_add_stmt (end, g);
9149 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
9150 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9152 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
9153 gimple_seq_add_stmt (end, g);
9154 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9156 if (code != OMP_PARALLEL)
9158 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9159 tree num_thr = create_tmp_var (integer_type_node);
9160 g = gimple_build_call (t, 0);
9161 gimple_call_set_lhs (g, num_thr);
9162 gimple_seq_add_stmt (end, g);
9163 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
9164 gimple_seq_add_stmt (end, g);
9165 if (cancellable)
9166 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9168 else
9170 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
9171 OMP_CLAUSE__REDUCTEMP_);
9172 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
9173 t = fold_convert (size_type_node, t);
9174 gimplify_assign (num_thr_sz, t, end);
9176 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9177 NULL_TREE, NULL_TREE);
9178 tree data = create_tmp_var (pointer_sized_int_node);
9179 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
9180 if (code == OMP_TASKLOOP)
9182 lab7 = create_artificial_label (UNKNOWN_LOCATION);
9183 g = gimple_build_cond (NE_EXPR, data,
9184 build_zero_cst (pointer_sized_int_node),
9185 lab1, lab7);
9186 gimple_seq_add_stmt (end, g);
9188 gimple_seq_add_stmt (end, gimple_build_label (lab1));
9189 tree ptr;
9190 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
9191 ptr = create_tmp_var (build_pointer_type (record_type));
9192 else
9193 ptr = create_tmp_var (ptr_type_node);
9194 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
9196 tree field = TYPE_FIELDS (record_type);
9197 cnt = 0;
9198 if (cancellable)
9199 field = DECL_CHAIN (DECL_CHAIN (field));
9200 for (int pass = 0; pass < 2; pass++)
9202 tree decl, type, next;
9203 for (tree c = clauses;
9204 omp_task_reduction_iterate (pass, code, ccode,
9205 &c, &decl, &type, &next); c = next)
9207 tree var = decl, ref;
9208 if (TREE_CODE (decl) == MEM_REF)
9210 var = TREE_OPERAND (var, 0);
9211 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
9212 var = TREE_OPERAND (var, 0);
9213 tree v = var;
9214 if (TREE_CODE (var) == ADDR_EXPR)
9215 var = TREE_OPERAND (var, 0);
9216 else if (TREE_CODE (var) == INDIRECT_REF)
9217 var = TREE_OPERAND (var, 0);
9218 tree orig_var = var;
9219 if (is_variable_sized (var))
9221 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
9222 var = DECL_VALUE_EXPR (var);
9223 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
9224 var = TREE_OPERAND (var, 0);
9225 gcc_assert (DECL_P (var));
9227 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9228 if (orig_var != var)
9229 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
9230 else if (TREE_CODE (v) == ADDR_EXPR)
9231 t = build_fold_addr_expr (t);
9232 else if (TREE_CODE (v) == INDIRECT_REF)
9233 t = build_fold_indirect_ref (t);
9234 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
9236 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
9237 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
9238 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
9240 if (!integer_zerop (TREE_OPERAND (decl, 1)))
9241 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
9242 fold_convert (size_type_node,
9243 TREE_OPERAND (decl, 1)));
9245 else
9247 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9248 if (!omp_privatize_by_reference (decl))
9249 t = build_fold_addr_expr (t);
9251 t = fold_convert (pointer_sized_int_node, t);
9252 seq = NULL;
9253 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9254 gimple_seq_add_seq (start, seq);
9255 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9256 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9257 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9258 t = unshare_expr (byte_position (field));
9259 t = fold_convert (pointer_sized_int_node, t);
9260 ctx->task_reduction_map->put (c, cnt);
9261 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
9262 ? t : NULL_TREE);
9263 seq = NULL;
9264 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9265 gimple_seq_add_seq (start, seq);
9266 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9267 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
9268 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9270 tree bfield = DECL_CHAIN (field);
9271 tree cond;
9272 if (code == OMP_PARALLEL
9273 || code == OMP_FOR
9274 || code == OMP_SECTIONS
9275 || code == OMP_SCOPE)
9276 /* In parallel, worksharing or scope all threads unconditionally
9277 initialize all their task reduction private variables. */
9278 cond = boolean_true_node;
9279 else if (TREE_TYPE (ptr) == ptr_type_node)
9281 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9282 unshare_expr (byte_position (bfield)));
9283 seq = NULL;
9284 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
9285 gimple_seq_add_seq (end, seq);
9286 tree pbool = build_pointer_type (TREE_TYPE (bfield));
9287 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
9288 build_int_cst (pbool, 0));
9290 else
9291 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
9292 build_simple_mem_ref (ptr), bfield, NULL_TREE);
9293 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9294 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9295 tree condv = create_tmp_var (boolean_type_node);
9296 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
9297 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
9298 lab3, lab4);
9299 gimple_seq_add_stmt (end, g);
9300 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9301 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
9303 /* If this reduction doesn't need destruction and parallel
9304 has been cancelled, there is nothing to do for this
9305 reduction, so jump around the merge operation. */
9306 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9307 g = gimple_build_cond (NE_EXPR, cancellable,
9308 build_zero_cst (TREE_TYPE (cancellable)),
9309 lab4, lab5);
9310 gimple_seq_add_stmt (end, g);
9311 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9314 tree new_var;
9315 if (TREE_TYPE (ptr) == ptr_type_node)
9317 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9318 unshare_expr (byte_position (field)));
9319 seq = NULL;
9320 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9321 gimple_seq_add_seq (end, seq);
9322 tree pbool = build_pointer_type (TREE_TYPE (field));
9323 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9324 build_int_cst (pbool, 0));
9326 else
9327 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9328 build_simple_mem_ref (ptr), field, NULL_TREE);
9330 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9331 if (TREE_CODE (decl) != MEM_REF
9332 && omp_privatize_by_reference (decl))
9333 ref = build_simple_mem_ref (ref);
9334 /* reduction(-:var) sums up the partial results, so it acts
9335 identically to reduction(+:var). */
9336 if (rcode == MINUS_EXPR)
9337 rcode = PLUS_EXPR;
9338 if (TREE_CODE (decl) == MEM_REF)
9340 tree type = TREE_TYPE (new_var);
9341 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9342 tree i = create_tmp_var (TREE_TYPE (v));
9343 tree ptype = build_pointer_type (TREE_TYPE (type));
9344 if (DECL_P (v))
9346 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9347 tree vv = create_tmp_var (TREE_TYPE (v));
9348 gimplify_assign (vv, v, start);
9349 v = vv;
9351 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9352 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9353 new_var = build_fold_addr_expr (new_var);
9354 new_var = fold_convert (ptype, new_var);
9355 ref = fold_convert (ptype, ref);
9356 tree m = create_tmp_var (ptype);
9357 gimplify_assign (m, new_var, end);
9358 new_var = m;
9359 m = create_tmp_var (ptype);
9360 gimplify_assign (m, ref, end);
9361 ref = m;
9362 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9363 tree body = create_artificial_label (UNKNOWN_LOCATION);
9364 tree endl = create_artificial_label (UNKNOWN_LOCATION);
9365 gimple_seq_add_stmt (end, gimple_build_label (body));
9366 tree priv = build_simple_mem_ref (new_var);
9367 tree out = build_simple_mem_ref (ref);
9368 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9370 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9371 tree decl_placeholder
9372 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9373 tree lab6 = NULL_TREE;
9374 if (cancellable)
9376 /* If this reduction needs destruction and parallel
9377 has been cancelled, jump around the merge operation
9378 to the destruction. */
9379 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9380 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9381 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9382 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9383 lab6, lab5);
9384 gimple_seq_add_stmt (end, g);
9385 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9387 SET_DECL_VALUE_EXPR (placeholder, out);
9388 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9389 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9390 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9391 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9392 gimple_seq_add_seq (end,
9393 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9394 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9395 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9397 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9398 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9400 if (cancellable)
9401 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9402 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9403 if (x)
9405 gimple_seq tseq = NULL;
9406 gimplify_stmt (&x, &tseq);
9407 gimple_seq_add_seq (end, tseq);
9410 else
9412 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9413 out = unshare_expr (out);
9414 gimplify_assign (out, x, end);
9416 gimple *g
9417 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9418 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9419 gimple_seq_add_stmt (end, g);
9420 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9421 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9422 gimple_seq_add_stmt (end, g);
9423 g = gimple_build_assign (i, PLUS_EXPR, i,
9424 build_int_cst (TREE_TYPE (i), 1));
9425 gimple_seq_add_stmt (end, g);
9426 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9427 gimple_seq_add_stmt (end, g);
9428 gimple_seq_add_stmt (end, gimple_build_label (endl));
9430 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9432 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9433 tree oldv = NULL_TREE;
9434 tree lab6 = NULL_TREE;
9435 if (cancellable)
9437 /* If this reduction needs destruction and parallel
9438 has been cancelled, jump around the merge operation
9439 to the destruction. */
9440 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9441 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9442 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9443 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9444 lab6, lab5);
9445 gimple_seq_add_stmt (end, g);
9446 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9448 if (omp_privatize_by_reference (decl)
9449 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9450 TREE_TYPE (ref)))
9451 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9452 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9453 tree refv = create_tmp_var (TREE_TYPE (ref));
9454 gimplify_assign (refv, ref, end);
9455 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9456 SET_DECL_VALUE_EXPR (placeholder, ref);
9457 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9458 tree d = maybe_lookup_decl (decl, ctx);
9459 gcc_assert (d);
9460 if (DECL_HAS_VALUE_EXPR_P (d))
9461 oldv = DECL_VALUE_EXPR (d);
9462 if (omp_privatize_by_reference (var))
9464 tree v = fold_convert (TREE_TYPE (d),
9465 build_fold_addr_expr (new_var));
9466 SET_DECL_VALUE_EXPR (d, v);
9468 else
9469 SET_DECL_VALUE_EXPR (d, new_var);
9470 DECL_HAS_VALUE_EXPR_P (d) = 1;
9471 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9472 if (oldv)
9473 SET_DECL_VALUE_EXPR (d, oldv);
9474 else
9476 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9477 DECL_HAS_VALUE_EXPR_P (d) = 0;
9479 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9480 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9481 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9482 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9483 if (cancellable)
9484 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9485 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9486 if (x)
9488 gimple_seq tseq = NULL;
9489 gimplify_stmt (&x, &tseq);
9490 gimple_seq_add_seq (end, tseq);
9493 else
9495 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9496 ref = unshare_expr (ref);
9497 gimplify_assign (ref, x, end);
9499 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9500 ++cnt;
9501 field = DECL_CHAIN (bfield);
9505 if (code == OMP_TASKGROUP)
9507 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9508 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9509 gimple_seq_add_stmt (start, g);
9511 else
9513 tree c;
9514 if (code == OMP_FOR)
9515 c = gimple_omp_for_clauses (ctx->stmt);
9516 else if (code == OMP_SECTIONS)
9517 c = gimple_omp_sections_clauses (ctx->stmt);
9518 else if (code == OMP_SCOPE)
9519 c = gimple_omp_scope_clauses (ctx->stmt);
9520 else
9521 c = gimple_omp_taskreg_clauses (ctx->stmt);
9522 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9523 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9524 build_fold_addr_expr (avar));
9525 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9528 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9529 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9530 size_one_node));
9531 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9532 gimple_seq_add_stmt (end, g);
9533 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9534 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9536 enum built_in_function bfn
9537 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9538 t = builtin_decl_explicit (bfn);
9539 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9540 tree arg;
9541 if (cancellable)
9543 arg = create_tmp_var (c_bool_type);
9544 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9545 cancellable));
9547 else
9548 arg = build_int_cst (c_bool_type, 0);
9549 g = gimple_build_call (t, 1, arg);
9551 else
9553 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9554 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9556 gimple_seq_add_stmt (end, g);
9557 if (lab7)
9558 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9559 t = build_constructor (atype, NULL);
9560 TREE_THIS_VOLATILE (t) = 1;
9561 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9564 /* Expand code for an OpenMP taskgroup directive. */
9566 static void
9567 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9569 gimple *stmt = gsi_stmt (*gsi_p);
9570 gcall *x;
9571 gbind *bind;
9572 gimple_seq dseq = NULL;
9573 tree block = make_node (BLOCK);
9575 bind = gimple_build_bind (NULL, NULL, block);
9576 gsi_replace (gsi_p, bind, true);
9577 gimple_bind_add_stmt (bind, stmt);
9579 push_gimplify_context ();
9581 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9583 gimple_bind_add_stmt (bind, x);
9585 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9586 gimple_omp_taskgroup_clauses (stmt),
9587 gimple_bind_body_ptr (bind), &dseq);
9589 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9590 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9591 gimple_omp_set_body (stmt, NULL);
9593 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9594 gimple_bind_add_seq (bind, dseq);
9596 pop_gimplify_context (bind);
9598 gimple_bind_append_vars (bind, ctx->block_vars);
9599 BLOCK_VARS (block) = ctx->block_vars;
9603 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9605 static void
9606 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9607 omp_context *ctx)
9609 struct omp_for_data fd;
9610 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9611 return;
9613 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9614 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9615 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9616 if (!fd.ordered)
9617 return;
9619 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9620 tree c = gimple_omp_ordered_clauses (ord_stmt);
9621 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9622 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9624 /* Merge depend clauses from multiple adjacent
9625 #pragma omp ordered depend(sink:...) constructs
9626 into one #pragma omp ordered depend(sink:...), so that
9627 we can optimize them together. */
9628 gimple_stmt_iterator gsi = *gsi_p;
9629 gsi_next (&gsi);
9630 while (!gsi_end_p (gsi))
9632 gimple *stmt = gsi_stmt (gsi);
9633 if (is_gimple_debug (stmt)
9634 || gimple_code (stmt) == GIMPLE_NOP)
9636 gsi_next (&gsi);
9637 continue;
9639 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9640 break;
9641 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9642 c = gimple_omp_ordered_clauses (ord_stmt2);
9643 if (c == NULL_TREE
9644 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
9645 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9646 break;
9647 while (*list_p)
9648 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9649 *list_p = c;
9650 gsi_remove (&gsi, true);
9654 /* Canonicalize sink dependence clauses into one folded clause if
9655 possible.
9657 The basic algorithm is to create a sink vector whose first
9658 element is the GCD of all the first elements, and whose remaining
9659 elements are the minimum of the subsequent columns.
9661 We ignore dependence vectors whose first element is zero because
9662 such dependencies are known to be executed by the same thread.
9664 We take into account the direction of the loop, so a minimum
9665 becomes a maximum if the loop is iterating forwards. We also
9666 ignore sink clauses where the loop direction is unknown, or where
9667 the offsets are clearly invalid because they are not a multiple
9668 of the loop increment.
9670 For example:
9672 #pragma omp for ordered(2)
9673 for (i=0; i < N; ++i)
9674 for (j=0; j < M; ++j)
9676 #pragma omp ordered \
9677 depend(sink:i-8,j-2) \
9678 depend(sink:i,j-1) \ // Completely ignored because i+0.
9679 depend(sink:i-4,j-3) \
9680 depend(sink:i-6,j-4)
9681 #pragma omp ordered depend(source)
9684 Folded clause is:
9686 depend(sink:-gcd(8,4,6),-min(2,3,4))
9687 -or-
9688 depend(sink:-2,-2)
9691 /* FIXME: Computing GCD's where the first element is zero is
9692 non-trivial in the presence of collapsed loops. Do this later. */
9693 if (fd.collapse > 1)
9694 return;
9696 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9698 /* wide_int is not a POD so it must be default-constructed. */
9699 for (unsigned i = 0; i != 2 * len - 1; ++i)
9700 new (static_cast<void*>(folded_deps + i)) wide_int ();
9702 tree folded_dep = NULL_TREE;
9703 /* TRUE if the first dimension's offset is negative. */
9704 bool neg_offset_p = false;
9706 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9707 unsigned int i;
9708 while ((c = *list_p) != NULL)
9710 bool remove = false;
9712 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
9713 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9714 goto next_ordered_clause;
9716 tree vec;
9717 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9718 vec && TREE_CODE (vec) == TREE_LIST;
9719 vec = TREE_CHAIN (vec), ++i)
9721 gcc_assert (i < len);
9723 /* omp_extract_for_data has canonicalized the condition. */
9724 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9725 || fd.loops[i].cond_code == GT_EXPR);
9726 bool forward = fd.loops[i].cond_code == LT_EXPR;
9727 bool maybe_lexically_later = true;
9729 /* While the committee makes up its mind, bail if we have any
9730 non-constant steps. */
9731 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9732 goto lower_omp_ordered_ret;
9734 tree itype = TREE_TYPE (TREE_VALUE (vec));
9735 if (POINTER_TYPE_P (itype))
9736 itype = sizetype;
9737 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9738 TYPE_PRECISION (itype),
9739 TYPE_SIGN (itype));
9741 /* Ignore invalid offsets that are not multiples of the step. */
9742 if (!wi::multiple_of_p (wi::abs (offset),
9743 wi::abs (wi::to_wide (fd.loops[i].step)),
9744 UNSIGNED))
9746 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9747 "ignoring sink clause with offset that is not "
9748 "a multiple of the loop step");
9749 remove = true;
9750 goto next_ordered_clause;
9753 /* Calculate the first dimension. The first dimension of
9754 the folded dependency vector is the GCD of the first
9755 elements, while ignoring any first elements whose offset
9756 is 0. */
9757 if (i == 0)
9759 /* Ignore dependence vectors whose first dimension is 0. */
9760 if (offset == 0)
9762 remove = true;
9763 goto next_ordered_clause;
9765 else
9767 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9769 error_at (OMP_CLAUSE_LOCATION (c),
9770 "first offset must be in opposite direction "
9771 "of loop iterations");
9772 goto lower_omp_ordered_ret;
9774 if (forward)
9775 offset = -offset;
9776 neg_offset_p = forward;
9777 /* Initialize the first time around. */
9778 if (folded_dep == NULL_TREE)
9780 folded_dep = c;
9781 folded_deps[0] = offset;
9783 else
9784 folded_deps[0] = wi::gcd (folded_deps[0],
9785 offset, UNSIGNED);
9788 /* Calculate minimum for the remaining dimensions. */
9789 else
9791 folded_deps[len + i - 1] = offset;
9792 if (folded_dep == c)
9793 folded_deps[i] = offset;
9794 else if (maybe_lexically_later
9795 && !wi::eq_p (folded_deps[i], offset))
9797 if (forward ^ wi::gts_p (folded_deps[i], offset))
9799 unsigned int j;
9800 folded_dep = c;
9801 for (j = 1; j <= i; j++)
9802 folded_deps[j] = folded_deps[len + j - 1];
9804 else
9805 maybe_lexically_later = false;
9809 gcc_assert (i == len);
9811 remove = true;
9813 next_ordered_clause:
9814 if (remove)
9815 *list_p = OMP_CLAUSE_CHAIN (c);
9816 else
9817 list_p = &OMP_CLAUSE_CHAIN (c);
9820 if (folded_dep)
9822 if (neg_offset_p)
9823 folded_deps[0] = -folded_deps[0];
9825 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9826 if (POINTER_TYPE_P (itype))
9827 itype = sizetype;
9829 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9830 = wide_int_to_tree (itype, folded_deps[0]);
9831 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9832 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9835 lower_omp_ordered_ret:
9837 /* Ordered without clauses is #pragma omp threads, while we want
9838 a nop instead if we remove all clauses. */
9839 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9840 gsi_replace (gsi_p, gimple_build_nop (), true);
9844 /* Expand code for an OpenMP ordered directive. */
9846 static void
9847 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9849 tree block;
9850 gimple *stmt = gsi_stmt (*gsi_p), *g;
9851 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9852 gcall *x;
9853 gbind *bind;
9854 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9855 OMP_CLAUSE_SIMD);
9856 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9857 loop. */
9858 bool maybe_simt
9859 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9860 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9861 OMP_CLAUSE_THREADS);
9863 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9864 OMP_CLAUSE_DEPEND))
9866 /* FIXME: This is needs to be moved to the expansion to verify various
9867 conditions only testable on cfg with dominators computed, and also
9868 all the depend clauses to be merged still might need to be available
9869 for the runtime checks. */
9870 if (0)
9871 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9872 return;
9875 push_gimplify_context ();
9877 block = make_node (BLOCK);
9878 bind = gimple_build_bind (NULL, NULL, block);
9879 gsi_replace (gsi_p, bind, true);
9880 gimple_bind_add_stmt (bind, stmt);
9882 if (simd)
9884 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9885 build_int_cst (NULL_TREE, threads));
9886 cfun->has_simduid_loops = true;
9888 else
9889 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9891 gimple_bind_add_stmt (bind, x);
9893 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9894 if (maybe_simt)
9896 counter = create_tmp_var (integer_type_node);
9897 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9898 gimple_call_set_lhs (g, counter);
9899 gimple_bind_add_stmt (bind, g);
9901 body = create_artificial_label (UNKNOWN_LOCATION);
9902 test = create_artificial_label (UNKNOWN_LOCATION);
9903 gimple_bind_add_stmt (bind, gimple_build_label (body));
9905 tree simt_pred = create_tmp_var (integer_type_node);
9906 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9907 gimple_call_set_lhs (g, simt_pred);
9908 gimple_bind_add_stmt (bind, g);
9910 tree t = create_artificial_label (UNKNOWN_LOCATION);
9911 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9912 gimple_bind_add_stmt (bind, g);
9914 gimple_bind_add_stmt (bind, gimple_build_label (t));
9916 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9917 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9918 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9919 gimple_omp_set_body (stmt, NULL);
9921 if (maybe_simt)
9923 gimple_bind_add_stmt (bind, gimple_build_label (test));
9924 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9925 gimple_bind_add_stmt (bind, g);
9927 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9928 tree nonneg = create_tmp_var (integer_type_node);
9929 gimple_seq tseq = NULL;
9930 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9931 gimple_bind_add_seq (bind, tseq);
9933 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9934 gimple_call_set_lhs (g, nonneg);
9935 gimple_bind_add_stmt (bind, g);
9937 tree end = create_artificial_label (UNKNOWN_LOCATION);
9938 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9939 gimple_bind_add_stmt (bind, g);
9941 gimple_bind_add_stmt (bind, gimple_build_label (end));
9943 if (simd)
9944 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9945 build_int_cst (NULL_TREE, threads));
9946 else
9947 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9949 gimple_bind_add_stmt (bind, x);
9951 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9953 pop_gimplify_context (bind);
9955 gimple_bind_append_vars (bind, ctx->block_vars);
9956 BLOCK_VARS (block) = gimple_bind_vars (bind);
9960 /* Expand code for an OpenMP scan directive and the structured block
9961 before the scan directive. */
9963 static void
9964 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9966 gimple *stmt = gsi_stmt (*gsi_p);
9967 bool has_clauses
9968 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9969 tree lane = NULL_TREE;
9970 gimple_seq before = NULL;
9971 omp_context *octx = ctx->outer;
9972 gcc_assert (octx);
9973 if (octx->scan_exclusive && !has_clauses)
9975 gimple_stmt_iterator gsi2 = *gsi_p;
9976 gsi_next (&gsi2);
9977 gimple *stmt2 = gsi_stmt (gsi2);
9978 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9979 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9980 the one with exclusive clause(s), comes first. */
9981 if (stmt2
9982 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9983 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9985 gsi_remove (gsi_p, false);
9986 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
9987 ctx = maybe_lookup_ctx (stmt2);
9988 gcc_assert (ctx);
9989 lower_omp_scan (gsi_p, ctx);
9990 return;
9994 bool input_phase = has_clauses ^ octx->scan_inclusive;
9995 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9996 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
9997 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9998 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9999 && !gimple_omp_for_combined_p (octx->stmt));
10000 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
10001 if (is_for_simd && octx->for_simd_scan_phase)
10002 is_simd = false;
10003 if (is_simd)
10004 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
10005 OMP_CLAUSE__SIMDUID_))
10007 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
10008 lane = create_tmp_var (unsigned_type_node);
10009 tree t = build_int_cst (integer_type_node,
10010 input_phase ? 1
10011 : octx->scan_inclusive ? 2 : 3);
10012 gimple *g
10013 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
10014 gimple_call_set_lhs (g, lane);
10015 gimple_seq_add_stmt (&before, g);
10018 if (is_simd || is_for)
10020 for (tree c = gimple_omp_for_clauses (octx->stmt);
10021 c; c = OMP_CLAUSE_CHAIN (c))
10022 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10023 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10025 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10026 tree var = OMP_CLAUSE_DECL (c);
10027 tree new_var = lookup_decl (var, octx);
10028 tree val = new_var;
10029 tree var2 = NULL_TREE;
10030 tree var3 = NULL_TREE;
10031 tree var4 = NULL_TREE;
10032 tree lane0 = NULL_TREE;
10033 tree new_vard = new_var;
10034 if (omp_privatize_by_reference (var))
10036 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10037 val = new_var;
10039 if (DECL_HAS_VALUE_EXPR_P (new_vard))
10041 val = DECL_VALUE_EXPR (new_vard);
10042 if (new_vard != new_var)
10044 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
10045 val = TREE_OPERAND (val, 0);
10047 if (TREE_CODE (val) == ARRAY_REF
10048 && VAR_P (TREE_OPERAND (val, 0)))
10050 tree v = TREE_OPERAND (val, 0);
10051 if (lookup_attribute ("omp simd array",
10052 DECL_ATTRIBUTES (v)))
10054 val = unshare_expr (val);
10055 lane0 = TREE_OPERAND (val, 1);
10056 TREE_OPERAND (val, 1) = lane;
10057 var2 = lookup_decl (v, octx);
10058 if (octx->scan_exclusive)
10059 var4 = lookup_decl (var2, octx);
10060 if (input_phase
10061 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10062 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
10063 if (!input_phase)
10065 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
10066 var2, lane, NULL_TREE, NULL_TREE);
10067 TREE_THIS_NOTRAP (var2) = 1;
10068 if (octx->scan_exclusive)
10070 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
10071 var4, lane, NULL_TREE,
10072 NULL_TREE);
10073 TREE_THIS_NOTRAP (var4) = 1;
10076 else
10077 var2 = val;
10080 gcc_assert (var2);
10082 else
10084 var2 = build_outer_var_ref (var, octx);
10085 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10087 var3 = maybe_lookup_decl (new_vard, octx);
10088 if (var3 == new_vard || var3 == NULL_TREE)
10089 var3 = NULL_TREE;
10090 else if (is_simd && octx->scan_exclusive && !input_phase)
10092 var4 = maybe_lookup_decl (var3, octx);
10093 if (var4 == var3 || var4 == NULL_TREE)
10095 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
10097 var4 = var3;
10098 var3 = NULL_TREE;
10100 else
10101 var4 = NULL_TREE;
10105 if (is_simd
10106 && octx->scan_exclusive
10107 && !input_phase
10108 && var4 == NULL_TREE)
10109 var4 = create_tmp_var (TREE_TYPE (val));
10111 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10113 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10114 if (input_phase)
10116 if (var3)
10118 /* If we've added a separate identity element
10119 variable, copy it over into val. */
10120 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
10121 var3);
10122 gimplify_and_add (x, &before);
10124 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10126 /* Otherwise, assign to it the identity element. */
10127 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10128 if (is_for)
10129 tseq = copy_gimple_seq_and_replace_locals (tseq);
10130 tree ref = build_outer_var_ref (var, octx);
10131 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10132 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10133 if (x)
10135 if (new_vard != new_var)
10136 val = build_fold_addr_expr_loc (clause_loc, val);
10137 SET_DECL_VALUE_EXPR (new_vard, val);
10139 SET_DECL_VALUE_EXPR (placeholder, ref);
10140 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10141 lower_omp (&tseq, octx);
10142 if (x)
10143 SET_DECL_VALUE_EXPR (new_vard, x);
10144 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10145 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10146 gimple_seq_add_seq (&before, tseq);
10147 if (is_simd)
10148 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10151 else if (is_simd)
10153 tree x;
10154 if (octx->scan_exclusive)
10156 tree v4 = unshare_expr (var4);
10157 tree v2 = unshare_expr (var2);
10158 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
10159 gimplify_and_add (x, &before);
10161 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10162 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10163 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10164 tree vexpr = val;
10165 if (x && new_vard != new_var)
10166 vexpr = build_fold_addr_expr_loc (clause_loc, val);
10167 if (x)
10168 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10169 SET_DECL_VALUE_EXPR (placeholder, var2);
10170 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10171 lower_omp (&tseq, octx);
10172 gimple_seq_add_seq (&before, tseq);
10173 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10174 if (x)
10175 SET_DECL_VALUE_EXPR (new_vard, x);
10176 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10177 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10178 if (octx->scan_inclusive)
10180 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10181 var2);
10182 gimplify_and_add (x, &before);
10184 else if (lane0 == NULL_TREE)
10186 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10187 var4);
10188 gimplify_and_add (x, &before);
10192 else
10194 if (input_phase)
10196 /* input phase. Set val to initializer before
10197 the body. */
10198 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
10199 gimplify_assign (val, x, &before);
10201 else if (is_simd)
10203 /* scan phase. */
10204 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10205 if (code == MINUS_EXPR)
10206 code = PLUS_EXPR;
10208 tree x = build2 (code, TREE_TYPE (var2),
10209 unshare_expr (var2), unshare_expr (val));
10210 if (octx->scan_inclusive)
10212 gimplify_assign (unshare_expr (var2), x, &before);
10213 gimplify_assign (val, var2, &before);
10215 else
10217 gimplify_assign (unshare_expr (var4),
10218 unshare_expr (var2), &before);
10219 gimplify_assign (var2, x, &before);
10220 if (lane0 == NULL_TREE)
10221 gimplify_assign (val, var4, &before);
10225 if (octx->scan_exclusive && !input_phase && lane0)
10227 tree vexpr = unshare_expr (var4);
10228 TREE_OPERAND (vexpr, 1) = lane0;
10229 if (new_vard != new_var)
10230 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
10231 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10235 if (is_simd && !is_for_simd)
10237 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
10238 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
10239 gsi_replace (gsi_p, gimple_build_nop (), true);
10240 return;
10242 lower_omp (gimple_omp_body_ptr (stmt), octx);
10243 if (before)
10245 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
10246 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
10251 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10252 substitution of a couple of function calls. But in the NAMED case,
10253 requires that languages coordinate a symbol name. It is therefore
10254 best put here in common code. */
10256 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
10258 static void
10259 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10261 tree block;
10262 tree name, lock, unlock;
10263 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10264 gbind *bind;
10265 location_t loc = gimple_location (stmt);
10266 gimple_seq tbody;
10268 name = gimple_omp_critical_name (stmt);
10269 if (name)
10271 tree decl;
10273 if (!critical_name_mutexes)
10274 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10276 tree *n = critical_name_mutexes->get (name);
10277 if (n == NULL)
10279 char *new_str;
10281 decl = create_tmp_var_raw (ptr_type_node);
10283 new_str = ACONCAT ((".gomp_critical_user_",
10284 IDENTIFIER_POINTER (name), NULL));
10285 DECL_NAME (decl) = get_identifier (new_str);
10286 TREE_PUBLIC (decl) = 1;
10287 TREE_STATIC (decl) = 1;
10288 DECL_COMMON (decl) = 1;
10289 DECL_ARTIFICIAL (decl) = 1;
10290 DECL_IGNORED_P (decl) = 1;
10292 varpool_node::finalize_decl (decl);
10294 critical_name_mutexes->put (name, decl);
10296 else
10297 decl = *n;
10299 /* If '#pragma omp critical' is inside offloaded region or
10300 inside function marked as offloadable, the symbol must be
10301 marked as offloadable too. */
10302 omp_context *octx;
10303 if (cgraph_node::get (current_function_decl)->offloadable)
10304 varpool_node::get_create (decl)->offloadable = 1;
10305 else
10306 for (octx = ctx->outer; octx; octx = octx->outer)
10307 if (is_gimple_omp_offloaded (octx->stmt))
10309 varpool_node::get_create (decl)->offloadable = 1;
10310 break;
10313 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10314 lock = build_call_expr_loc (loc, lock, 1,
10315 build_fold_addr_expr_loc (loc, decl));
10317 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10318 unlock = build_call_expr_loc (loc, unlock, 1,
10319 build_fold_addr_expr_loc (loc, decl));
10321 else
10323 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10324 lock = build_call_expr_loc (loc, lock, 0);
10326 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10327 unlock = build_call_expr_loc (loc, unlock, 0);
10330 push_gimplify_context ();
10332 block = make_node (BLOCK);
10333 bind = gimple_build_bind (NULL, NULL, block);
10334 gsi_replace (gsi_p, bind, true);
10335 gimple_bind_add_stmt (bind, stmt);
10337 tbody = gimple_bind_body (bind);
10338 gimplify_and_add (lock, &tbody);
10339 gimple_bind_set_body (bind, tbody);
10341 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10342 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10343 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10344 gimple_omp_set_body (stmt, NULL);
10346 tbody = gimple_bind_body (bind);
10347 gimplify_and_add (unlock, &tbody);
10348 gimple_bind_set_body (bind, tbody);
10350 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10352 pop_gimplify_context (bind);
10353 gimple_bind_append_vars (bind, ctx->block_vars);
10354 BLOCK_VARS (block) = gimple_bind_vars (bind);
10357 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10358 for a lastprivate clause. Given a loop control predicate of (V
10359 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10360 is appended to *DLIST, iterator initialization is appended to
10361 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10362 to be emitted in a critical section. */
10364 static void
10365 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10366 gimple_seq *dlist, gimple_seq *clist,
10367 struct omp_context *ctx)
10369 tree clauses, cond, vinit;
10370 enum tree_code cond_code;
10371 gimple_seq stmts;
10373 cond_code = fd->loop.cond_code;
10374 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10376 /* When possible, use a strict equality expression. This can let VRP
10377 type optimizations deduce the value and remove a copy. */
10378 if (tree_fits_shwi_p (fd->loop.step))
10380 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10381 if (step == 1 || step == -1)
10382 cond_code = EQ_EXPR;
10385 tree n2 = fd->loop.n2;
10386 if (fd->collapse > 1
10387 && TREE_CODE (n2) != INTEGER_CST
10388 && gimple_omp_for_combined_into_p (fd->for_stmt))
10390 struct omp_context *taskreg_ctx = NULL;
10391 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10393 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10394 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10395 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10397 if (gimple_omp_for_combined_into_p (gfor))
10399 gcc_assert (ctx->outer->outer
10400 && is_parallel_ctx (ctx->outer->outer));
10401 taskreg_ctx = ctx->outer->outer;
10403 else
10405 struct omp_for_data outer_fd;
10406 omp_extract_for_data (gfor, &outer_fd, NULL);
10407 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10410 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10411 taskreg_ctx = ctx->outer->outer;
10413 else if (is_taskreg_ctx (ctx->outer))
10414 taskreg_ctx = ctx->outer;
10415 if (taskreg_ctx)
10417 int i;
10418 tree taskreg_clauses
10419 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10420 tree innerc = omp_find_clause (taskreg_clauses,
10421 OMP_CLAUSE__LOOPTEMP_);
10422 gcc_assert (innerc);
10423 int count = fd->collapse;
10424 if (fd->non_rect
10425 && fd->last_nonrect == fd->first_nonrect + 1)
10426 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10427 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10428 count += 4;
10429 for (i = 0; i < count; i++)
10431 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10432 OMP_CLAUSE__LOOPTEMP_);
10433 gcc_assert (innerc);
10435 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10436 OMP_CLAUSE__LOOPTEMP_);
10437 if (innerc)
10438 n2 = fold_convert (TREE_TYPE (n2),
10439 lookup_decl (OMP_CLAUSE_DECL (innerc),
10440 taskreg_ctx));
10443 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10445 clauses = gimple_omp_for_clauses (fd->for_stmt);
10446 stmts = NULL;
10447 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
10448 if (!gimple_seq_empty_p (stmts))
10450 gimple_seq_add_seq (&stmts, *dlist);
10451 *dlist = stmts;
10453 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10454 vinit = fd->loop.n1;
10455 if (cond_code == EQ_EXPR
10456 && tree_fits_shwi_p (fd->loop.n2)
10457 && ! integer_zerop (fd->loop.n2))
10458 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10459 else
10460 vinit = unshare_expr (vinit);
10462 /* Initialize the iterator variable, so that threads that don't execute
10463 any iterations don't execute the lastprivate clauses by accident. */
10464 gimplify_assign (fd->loop.v, vinit, body_p);
10468 /* OpenACC privatization.
10470 Or, in other words, *sharing* at the respective OpenACC level of
10471 parallelism.
10473 From a correctness perspective, a non-addressable variable can't be accessed
10474 outside the current thread, so it can go in a (faster than shared memory)
10475 register -- though that register may need to be broadcast in some
10476 circumstances. A variable can only meaningfully be "shared" across workers
10477 or vector lanes if its address is taken, e.g. by a call to an atomic
10478 builtin.
10480 From an optimisation perspective, the answer might be fuzzier: maybe
10481 sometimes, using shared memory directly would be faster than
10482 broadcasting. */
10484 static void
10485 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10486 const location_t loc, const tree c,
10487 const tree decl)
10489 const dump_user_location_t d_u_loc
10490 = dump_user_location_t::from_location_t (loc);
10491 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10492 #if __GNUC__ >= 10
10493 # pragma GCC diagnostic push
10494 # pragma GCC diagnostic ignored "-Wformat"
10495 #endif
10496 dump_printf_loc (l_dump_flags, d_u_loc,
10497 "variable %<%T%> ", decl);
10498 #if __GNUC__ >= 10
10499 # pragma GCC diagnostic pop
10500 #endif
10501 if (c)
10502 dump_printf (l_dump_flags,
10503 "in %qs clause ",
10504 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10505 else
10506 dump_printf (l_dump_flags,
10507 "declared in block ");
10510 static bool
10511 oacc_privatization_candidate_p (const location_t loc, const tree c,
10512 const tree decl)
10514 dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10516 /* There is some differentiation depending on block vs. clause. */
10517 bool block = !c;
10519 bool res = true;
10521 if (res && !VAR_P (decl))
10523 res = false;
10525 if (dump_enabled_p ())
10527 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10528 dump_printf (l_dump_flags,
10529 "potentially has improper OpenACC privatization level: %qs\n",
10530 get_tree_code_name (TREE_CODE (decl)));
10534 if (res && block && TREE_STATIC (decl))
10536 res = false;
10538 if (dump_enabled_p ())
10540 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10541 dump_printf (l_dump_flags,
10542 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10543 "static");
10547 if (res && block && DECL_EXTERNAL (decl))
10549 res = false;
10551 if (dump_enabled_p ())
10553 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10554 dump_printf (l_dump_flags,
10555 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10556 "external");
10560 if (res && !TREE_ADDRESSABLE (decl))
10562 res = false;
10564 if (dump_enabled_p ())
10566 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10567 dump_printf (l_dump_flags,
10568 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10569 "not addressable");
10573 if (res)
10575 if (dump_enabled_p ())
10577 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10578 dump_printf (l_dump_flags,
10579 "is candidate for adjusting OpenACC privatization level\n");
10583 if (dump_file && (dump_flags & TDF_DETAILS))
10585 print_generic_decl (dump_file, decl, dump_flags);
10586 fprintf (dump_file, "\n");
10589 return res;
10592 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10593 CTX. */
10595 static void
10596 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10598 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10599 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10601 tree decl = OMP_CLAUSE_DECL (c);
10603 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c, decl))
10604 continue;
10606 gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10607 ctx->oacc_privatization_candidates.safe_push (decl);
10611 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10612 CTX. */
10614 static void
10615 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10617 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10619 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL, decl))
10620 continue;
10622 gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10623 ctx->oacc_privatization_candidates.safe_push (decl);
10627 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10629 static tree
10630 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10631 struct walk_stmt_info *wi)
10633 gimple *stmt = gsi_stmt (*gsi_p);
10635 *handled_ops_p = true;
10636 switch (gimple_code (stmt))
10638 WALK_SUBSTMTS;
10640 case GIMPLE_OMP_FOR:
10641 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10642 && gimple_omp_for_combined_into_p (stmt))
10643 *handled_ops_p = false;
10644 break;
10646 case GIMPLE_OMP_SCAN:
10647 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10648 return integer_zero_node;
10649 default:
10650 break;
10652 return NULL;
10655 /* Helper function for lower_omp_for, add transformations for a worksharing
10656 loop with scan directives inside of it.
10657 For worksharing loop not combined with simd, transform:
10658 #pragma omp for reduction(inscan,+:r) private(i)
10659 for (i = 0; i < n; i = i + 1)
10662 update (r);
10664 #pragma omp scan inclusive(r)
10666 use (r);
10670 into two worksharing loops + code to merge results:
10672 num_threads = omp_get_num_threads ();
10673 thread_num = omp_get_thread_num ();
10674 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10675 <D.2099>:
10676 var2 = r;
10677 goto <D.2101>;
10678 <D.2100>:
10679 // For UDRs this is UDR init, or if ctors are needed, copy from
10680 // var3 that has been constructed to contain the neutral element.
10681 var2 = 0;
10682 <D.2101>:
10683 ivar = 0;
10684 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10685 // a shared array with num_threads elements and rprivb to a local array
10686 // number of elements equal to the number of (contiguous) iterations the
10687 // current thread will perform. controlb and controlp variables are
10688 // temporaries to handle deallocation of rprivb at the end of second
10689 // GOMP_FOR.
10690 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10691 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10692 for (i = 0; i < n; i = i + 1)
10695 // For UDRs this is UDR init or copy from var3.
10696 r = 0;
10697 // This is the input phase from user code.
10698 update (r);
10701 // For UDRs this is UDR merge.
10702 var2 = var2 + r;
10703 // Rather than handing it over to the user, save to local thread's
10704 // array.
10705 rprivb[ivar] = var2;
10706 // For exclusive scan, the above two statements are swapped.
10707 ivar = ivar + 1;
10710 // And remember the final value from this thread's into the shared
10711 // rpriva array.
10712 rpriva[(sizetype) thread_num] = var2;
10713 // If more than one thread, compute using Work-Efficient prefix sum
10714 // the inclusive parallel scan of the rpriva array.
10715 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10716 <D.2102>:
10717 GOMP_barrier ();
10718 down = 0;
10719 k = 1;
10720 num_threadsu = (unsigned int) num_threads;
10721 thread_numup1 = (unsigned int) thread_num + 1;
10722 <D.2108>:
10723 twok = k << 1;
10724 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10725 <D.2110>:
10726 down = 4294967295;
10727 k = k >> 1;
10728 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10729 <D.2112>:
10730 k = k >> 1;
10731 <D.2111>:
10732 twok = k << 1;
10733 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10734 mul = REALPART_EXPR <cplx>;
10735 ovf = IMAGPART_EXPR <cplx>;
10736 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10737 <D.2116>:
10738 andv = k & down;
10739 andvm1 = andv + 4294967295;
10740 l = mul + andvm1;
10741 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10742 <D.2120>:
10743 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10744 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10745 rpriva[l] = rpriva[l - k] + rpriva[l];
10746 <D.2117>:
10747 if (down == 0) goto <D.2121>; else goto <D.2122>;
10748 <D.2121>:
10749 k = k << 1;
10750 goto <D.2123>;
10751 <D.2122>:
10752 k = k >> 1;
10753 <D.2123>:
10754 GOMP_barrier ();
10755 if (k != 0) goto <D.2108>; else goto <D.2103>;
10756 <D.2103>:
10757 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10758 <D.2124>:
10759 // For UDRs this is UDR init or copy from var3.
10760 var2 = 0;
10761 goto <D.2126>;
10762 <D.2125>:
10763 var2 = rpriva[thread_num - 1];
10764 <D.2126>:
10765 ivar = 0;
10766 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10767 reduction(inscan,+:r) private(i)
10768 for (i = 0; i < n; i = i + 1)
10771 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10772 r = var2 + rprivb[ivar];
10775 // This is the scan phase from user code.
10776 use (r);
10777 // Plus a bump of the iterator.
10778 ivar = ivar + 1;
10780 } */
10782 static void
10783 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10784 struct omp_for_data *fd, omp_context *ctx)
10786 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10787 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10789 gimple_seq body = gimple_omp_body (stmt);
10790 gimple_stmt_iterator input1_gsi = gsi_none ();
10791 struct walk_stmt_info wi;
10792 memset (&wi, 0, sizeof (wi));
10793 wi.val_only = true;
10794 wi.info = (void *) &input1_gsi;
10795 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10796 gcc_assert (!gsi_end_p (input1_gsi));
10798 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10799 gimple_stmt_iterator gsi = input1_gsi;
10800 gsi_next (&gsi);
10801 gimple_stmt_iterator scan1_gsi = gsi;
10802 gimple *scan_stmt1 = gsi_stmt (gsi);
10803 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10805 gimple_seq input_body = gimple_omp_body (input_stmt1);
10806 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10807 gimple_omp_set_body (input_stmt1, NULL);
10808 gimple_omp_set_body (scan_stmt1, NULL);
10809 gimple_omp_set_body (stmt, NULL);
10811 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10812 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10813 gimple_omp_set_body (stmt, body);
10814 gimple_omp_set_body (input_stmt1, input_body);
10816 gimple_stmt_iterator input2_gsi = gsi_none ();
10817 memset (&wi, 0, sizeof (wi));
10818 wi.val_only = true;
10819 wi.info = (void *) &input2_gsi;
10820 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10821 gcc_assert (!gsi_end_p (input2_gsi));
10823 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10824 gsi = input2_gsi;
10825 gsi_next (&gsi);
10826 gimple_stmt_iterator scan2_gsi = gsi;
10827 gimple *scan_stmt2 = gsi_stmt (gsi);
10828 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10829 gimple_omp_set_body (scan_stmt2, scan_body);
10831 gimple_stmt_iterator input3_gsi = gsi_none ();
10832 gimple_stmt_iterator scan3_gsi = gsi_none ();
10833 gimple_stmt_iterator input4_gsi = gsi_none ();
10834 gimple_stmt_iterator scan4_gsi = gsi_none ();
10835 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10836 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10837 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10838 if (is_for_simd)
10840 memset (&wi, 0, sizeof (wi));
10841 wi.val_only = true;
10842 wi.info = (void *) &input3_gsi;
10843 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10844 gcc_assert (!gsi_end_p (input3_gsi));
10846 input_stmt3 = gsi_stmt (input3_gsi);
10847 gsi = input3_gsi;
10848 gsi_next (&gsi);
10849 scan3_gsi = gsi;
10850 scan_stmt3 = gsi_stmt (gsi);
10851 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10853 memset (&wi, 0, sizeof (wi));
10854 wi.val_only = true;
10855 wi.info = (void *) &input4_gsi;
10856 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10857 gcc_assert (!gsi_end_p (input4_gsi));
10859 input_stmt4 = gsi_stmt (input4_gsi);
10860 gsi = input4_gsi;
10861 gsi_next (&gsi);
10862 scan4_gsi = gsi;
10863 scan_stmt4 = gsi_stmt (gsi);
10864 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10866 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10867 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
10870 tree num_threads = create_tmp_var (integer_type_node);
10871 tree thread_num = create_tmp_var (integer_type_node);
10872 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10873 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10874 gimple *g = gimple_build_call (nthreads_decl, 0);
10875 gimple_call_set_lhs (g, num_threads);
10876 gimple_seq_add_stmt (body_p, g);
10877 g = gimple_build_call (threadnum_decl, 0);
10878 gimple_call_set_lhs (g, thread_num);
10879 gimple_seq_add_stmt (body_p, g);
10881 tree ivar = create_tmp_var (sizetype);
10882 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
10883 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
10884 tree k = create_tmp_var (unsigned_type_node);
10885 tree l = create_tmp_var (unsigned_type_node);
10887 gimple_seq clist = NULL, mdlist = NULL;
10888 gimple_seq thr01_list = NULL, thrn1_list = NULL;
10889 gimple_seq thr02_list = NULL, thrn2_list = NULL;
10890 gimple_seq scan1_list = NULL, input2_list = NULL;
10891 gimple_seq last_list = NULL, reduc_list = NULL;
10892 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10893 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10894 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10896 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10897 tree var = OMP_CLAUSE_DECL (c);
10898 tree new_var = lookup_decl (var, ctx);
10899 tree var3 = NULL_TREE;
10900 tree new_vard = new_var;
10901 if (omp_privatize_by_reference (var))
10902 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10903 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10905 var3 = maybe_lookup_decl (new_vard, ctx);
10906 if (var3 == new_vard)
10907 var3 = NULL_TREE;
10910 tree ptype = build_pointer_type (TREE_TYPE (new_var));
10911 tree rpriva = create_tmp_var (ptype);
10912 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10913 OMP_CLAUSE_DECL (nc) = rpriva;
10914 *cp1 = nc;
10915 cp1 = &OMP_CLAUSE_CHAIN (nc);
10917 tree rprivb = create_tmp_var (ptype);
10918 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10919 OMP_CLAUSE_DECL (nc) = rprivb;
10920 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
10921 *cp1 = nc;
10922 cp1 = &OMP_CLAUSE_CHAIN (nc);
10924 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
10925 if (new_vard != new_var)
10926 TREE_ADDRESSABLE (var2) = 1;
10927 gimple_add_tmp_var (var2);
10929 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
10930 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10931 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10932 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10933 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
10935 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
10936 thread_num, integer_minus_one_node);
10937 x = fold_convert_loc (clause_loc, sizetype, x);
10938 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10939 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10940 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10941 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
10943 x = fold_convert_loc (clause_loc, sizetype, l);
10944 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10945 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10946 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10947 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
10949 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
10950 x = fold_convert_loc (clause_loc, sizetype, x);
10951 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10952 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10953 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10954 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
10956 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
10957 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10958 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
10959 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
10961 tree var4 = is_for_simd ? new_var : var2;
10962 tree var5 = NULL_TREE, var6 = NULL_TREE;
10963 if (is_for_simd)
10965 var5 = lookup_decl (var, input_simd_ctx);
10966 var6 = lookup_decl (var, scan_simd_ctx);
10967 if (new_vard != new_var)
10969 var5 = build_simple_mem_ref_loc (clause_loc, var5);
10970 var6 = build_simple_mem_ref_loc (clause_loc, var6);
10973 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10975 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10976 tree val = var2;
10978 x = lang_hooks.decls.omp_clause_default_ctor
10979 (c, var2, build_outer_var_ref (var, ctx));
10980 if (x)
10981 gimplify_and_add (x, &clist);
10983 x = build_outer_var_ref (var, ctx);
10984 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
10986 gimplify_and_add (x, &thr01_list);
10988 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
10989 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10990 if (var3)
10992 x = unshare_expr (var4);
10993 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10994 gimplify_and_add (x, &thrn1_list);
10995 x = unshare_expr (var4);
10996 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10997 gimplify_and_add (x, &thr02_list);
10999 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
11001 /* Otherwise, assign to it the identity element. */
11002 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11003 tseq = copy_gimple_seq_and_replace_locals (tseq);
11004 if (!is_for_simd)
11006 if (new_vard != new_var)
11007 val = build_fold_addr_expr_loc (clause_loc, val);
11008 SET_DECL_VALUE_EXPR (new_vard, val);
11009 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11011 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
11012 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11013 lower_omp (&tseq, ctx);
11014 gimple_seq_add_seq (&thrn1_list, tseq);
11015 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11016 lower_omp (&tseq, ctx);
11017 gimple_seq_add_seq (&thr02_list, tseq);
11018 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11019 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11020 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11021 if (y)
11022 SET_DECL_VALUE_EXPR (new_vard, y);
11023 else
11025 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11026 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11030 x = unshare_expr (var4);
11031 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
11032 gimplify_and_add (x, &thrn2_list);
11034 if (is_for_simd)
11036 x = unshare_expr (rprivb_ref);
11037 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
11038 gimplify_and_add (x, &scan1_list);
11040 else
11042 if (ctx->scan_exclusive)
11044 x = unshare_expr (rprivb_ref);
11045 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11046 gimplify_and_add (x, &scan1_list);
11049 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11050 tseq = copy_gimple_seq_and_replace_locals (tseq);
11051 SET_DECL_VALUE_EXPR (placeholder, var2);
11052 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11053 lower_omp (&tseq, ctx);
11054 gimple_seq_add_seq (&scan1_list, tseq);
11056 if (ctx->scan_inclusive)
11058 x = unshare_expr (rprivb_ref);
11059 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11060 gimplify_and_add (x, &scan1_list);
11064 x = unshare_expr (rpriva_ref);
11065 x = lang_hooks.decls.omp_clause_assign_op (c, x,
11066 unshare_expr (var4));
11067 gimplify_and_add (x, &mdlist);
11069 x = unshare_expr (is_for_simd ? var6 : new_var);
11070 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
11071 gimplify_and_add (x, &input2_list);
11073 val = rprivb_ref;
11074 if (new_vard != new_var)
11075 val = build_fold_addr_expr_loc (clause_loc, val);
11077 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11078 tseq = copy_gimple_seq_and_replace_locals (tseq);
11079 SET_DECL_VALUE_EXPR (new_vard, val);
11080 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11081 if (is_for_simd)
11083 SET_DECL_VALUE_EXPR (placeholder, var6);
11084 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11086 else
11087 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11088 lower_omp (&tseq, ctx);
11089 if (y)
11090 SET_DECL_VALUE_EXPR (new_vard, y);
11091 else
11093 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11094 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11096 if (!is_for_simd)
11098 SET_DECL_VALUE_EXPR (placeholder, new_var);
11099 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11100 lower_omp (&tseq, ctx);
11102 gimple_seq_add_seq (&input2_list, tseq);
11104 x = build_outer_var_ref (var, ctx);
11105 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
11106 gimplify_and_add (x, &last_list);
11108 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
11109 gimplify_and_add (x, &reduc_list);
11110 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11111 tseq = copy_gimple_seq_and_replace_locals (tseq);
11112 val = rprival_ref;
11113 if (new_vard != new_var)
11114 val = build_fold_addr_expr_loc (clause_loc, val);
11115 SET_DECL_VALUE_EXPR (new_vard, val);
11116 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11117 SET_DECL_VALUE_EXPR (placeholder, var2);
11118 lower_omp (&tseq, ctx);
11119 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11120 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11121 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11122 if (y)
11123 SET_DECL_VALUE_EXPR (new_vard, y);
11124 else
11126 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11127 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11129 gimple_seq_add_seq (&reduc_list, tseq);
11130 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
11131 gimplify_and_add (x, &reduc_list);
11133 x = lang_hooks.decls.omp_clause_dtor (c, var2);
11134 if (x)
11135 gimplify_and_add (x, dlist);
11137 else
11139 x = build_outer_var_ref (var, ctx);
11140 gimplify_assign (unshare_expr (var4), x, &thr01_list);
11142 x = omp_reduction_init (c, TREE_TYPE (new_var));
11143 gimplify_assign (unshare_expr (var4), unshare_expr (x),
11144 &thrn1_list);
11145 gimplify_assign (unshare_expr (var4), x, &thr02_list);
11147 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
11149 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
11150 if (code == MINUS_EXPR)
11151 code = PLUS_EXPR;
11153 if (is_for_simd)
11154 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
11155 else
11157 if (ctx->scan_exclusive)
11158 gimplify_assign (unshare_expr (rprivb_ref), var2,
11159 &scan1_list);
11160 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
11161 gimplify_assign (var2, x, &scan1_list);
11162 if (ctx->scan_inclusive)
11163 gimplify_assign (unshare_expr (rprivb_ref), var2,
11164 &scan1_list);
11167 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
11168 &mdlist);
11170 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
11171 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
11173 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
11174 &last_list);
11176 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
11177 unshare_expr (rprival_ref));
11178 gimplify_assign (rprival_ref, x, &reduc_list);
11182 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11183 gimple_seq_add_stmt (&scan1_list, g);
11184 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11185 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11186 ? scan_stmt4 : scan_stmt2), g);
11188 tree controlb = create_tmp_var (boolean_type_node);
11189 tree controlp = create_tmp_var (ptr_type_node);
11190 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11191 OMP_CLAUSE_DECL (nc) = controlb;
11192 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11193 *cp1 = nc;
11194 cp1 = &OMP_CLAUSE_CHAIN (nc);
11195 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11196 OMP_CLAUSE_DECL (nc) = controlp;
11197 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11198 *cp1 = nc;
11199 cp1 = &OMP_CLAUSE_CHAIN (nc);
11200 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11201 OMP_CLAUSE_DECL (nc) = controlb;
11202 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11203 *cp2 = nc;
11204 cp2 = &OMP_CLAUSE_CHAIN (nc);
11205 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11206 OMP_CLAUSE_DECL (nc) = controlp;
11207 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11208 *cp2 = nc;
11209 cp2 = &OMP_CLAUSE_CHAIN (nc);
11211 *cp1 = gimple_omp_for_clauses (stmt);
11212 gimple_omp_for_set_clauses (stmt, new_clauses1);
11213 *cp2 = gimple_omp_for_clauses (new_stmt);
11214 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
11216 if (is_for_simd)
11218 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
11219 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
11221 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
11222 GSI_SAME_STMT);
11223 gsi_remove (&input3_gsi, true);
11224 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11225 GSI_SAME_STMT);
11226 gsi_remove (&scan3_gsi, true);
11227 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11228 GSI_SAME_STMT);
11229 gsi_remove (&input4_gsi, true);
11230 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11231 GSI_SAME_STMT);
11232 gsi_remove (&scan4_gsi, true);
11234 else
11236 gimple_omp_set_body (scan_stmt1, scan1_list);
11237 gimple_omp_set_body (input_stmt2, input2_list);
11240 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11241 GSI_SAME_STMT);
11242 gsi_remove (&input1_gsi, true);
11243 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11244 GSI_SAME_STMT);
11245 gsi_remove (&scan1_gsi, true);
11246 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11247 GSI_SAME_STMT);
11248 gsi_remove (&input2_gsi, true);
11249 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11250 GSI_SAME_STMT);
11251 gsi_remove (&scan2_gsi, true);
11253 gimple_seq_add_seq (body_p, clist);
11255 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11256 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11257 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11258 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11259 gimple_seq_add_stmt (body_p, g);
11260 g = gimple_build_label (lab1);
11261 gimple_seq_add_stmt (body_p, g);
11262 gimple_seq_add_seq (body_p, thr01_list);
11263 g = gimple_build_goto (lab3);
11264 gimple_seq_add_stmt (body_p, g);
11265 g = gimple_build_label (lab2);
11266 gimple_seq_add_stmt (body_p, g);
11267 gimple_seq_add_seq (body_p, thrn1_list);
11268 g = gimple_build_label (lab3);
11269 gimple_seq_add_stmt (body_p, g);
11271 g = gimple_build_assign (ivar, size_zero_node);
11272 gimple_seq_add_stmt (body_p, g);
11274 gimple_seq_add_stmt (body_p, stmt);
11275 gimple_seq_add_seq (body_p, body);
11276 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11277 fd->loop.v));
11279 g = gimple_build_omp_return (true);
11280 gimple_seq_add_stmt (body_p, g);
11281 gimple_seq_add_seq (body_p, mdlist);
11283 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11284 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11285 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11286 gimple_seq_add_stmt (body_p, g);
11287 g = gimple_build_label (lab1);
11288 gimple_seq_add_stmt (body_p, g);
11290 g = omp_build_barrier (NULL);
11291 gimple_seq_add_stmt (body_p, g);
11293 tree down = create_tmp_var (unsigned_type_node);
11294 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11295 gimple_seq_add_stmt (body_p, g);
11297 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11298 gimple_seq_add_stmt (body_p, g);
11300 tree num_threadsu = create_tmp_var (unsigned_type_node);
11301 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11302 gimple_seq_add_stmt (body_p, g);
11304 tree thread_numu = create_tmp_var (unsigned_type_node);
11305 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11306 gimple_seq_add_stmt (body_p, g);
11308 tree thread_nump1 = create_tmp_var (unsigned_type_node);
11309 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11310 build_int_cst (unsigned_type_node, 1));
11311 gimple_seq_add_stmt (body_p, g);
11313 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11314 g = gimple_build_label (lab3);
11315 gimple_seq_add_stmt (body_p, g);
11317 tree twok = create_tmp_var (unsigned_type_node);
11318 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11319 gimple_seq_add_stmt (body_p, g);
11321 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11322 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11323 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11324 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11325 gimple_seq_add_stmt (body_p, g);
11326 g = gimple_build_label (lab4);
11327 gimple_seq_add_stmt (body_p, g);
11328 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11329 gimple_seq_add_stmt (body_p, g);
11330 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11331 gimple_seq_add_stmt (body_p, g);
11333 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11334 gimple_seq_add_stmt (body_p, g);
11335 g = gimple_build_label (lab6);
11336 gimple_seq_add_stmt (body_p, g);
11338 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11339 gimple_seq_add_stmt (body_p, g);
11341 g = gimple_build_label (lab5);
11342 gimple_seq_add_stmt (body_p, g);
11344 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11345 gimple_seq_add_stmt (body_p, g);
11347 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11348 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11349 gimple_call_set_lhs (g, cplx);
11350 gimple_seq_add_stmt (body_p, g);
11351 tree mul = create_tmp_var (unsigned_type_node);
11352 g = gimple_build_assign (mul, REALPART_EXPR,
11353 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11354 gimple_seq_add_stmt (body_p, g);
11355 tree ovf = create_tmp_var (unsigned_type_node);
11356 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11357 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11358 gimple_seq_add_stmt (body_p, g);
11360 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11361 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11362 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11363 lab7, lab8);
11364 gimple_seq_add_stmt (body_p, g);
11365 g = gimple_build_label (lab7);
11366 gimple_seq_add_stmt (body_p, g);
11368 tree andv = create_tmp_var (unsigned_type_node);
11369 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11370 gimple_seq_add_stmt (body_p, g);
11371 tree andvm1 = create_tmp_var (unsigned_type_node);
11372 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11373 build_minus_one_cst (unsigned_type_node));
11374 gimple_seq_add_stmt (body_p, g);
11376 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11377 gimple_seq_add_stmt (body_p, g);
11379 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11380 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11381 gimple_seq_add_stmt (body_p, g);
11382 g = gimple_build_label (lab9);
11383 gimple_seq_add_stmt (body_p, g);
11384 gimple_seq_add_seq (body_p, reduc_list);
11385 g = gimple_build_label (lab8);
11386 gimple_seq_add_stmt (body_p, g);
11388 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11389 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11390 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11391 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11392 lab10, lab11);
11393 gimple_seq_add_stmt (body_p, g);
11394 g = gimple_build_label (lab10);
11395 gimple_seq_add_stmt (body_p, g);
11396 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11397 gimple_seq_add_stmt (body_p, g);
11398 g = gimple_build_goto (lab12);
11399 gimple_seq_add_stmt (body_p, g);
11400 g = gimple_build_label (lab11);
11401 gimple_seq_add_stmt (body_p, g);
11402 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11403 gimple_seq_add_stmt (body_p, g);
11404 g = gimple_build_label (lab12);
11405 gimple_seq_add_stmt (body_p, g);
11407 g = omp_build_barrier (NULL);
11408 gimple_seq_add_stmt (body_p, g);
11410 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11411 lab3, lab2);
11412 gimple_seq_add_stmt (body_p, g);
11414 g = gimple_build_label (lab2);
11415 gimple_seq_add_stmt (body_p, g);
11417 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11418 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11419 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11420 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11421 gimple_seq_add_stmt (body_p, g);
11422 g = gimple_build_label (lab1);
11423 gimple_seq_add_stmt (body_p, g);
11424 gimple_seq_add_seq (body_p, thr02_list);
11425 g = gimple_build_goto (lab3);
11426 gimple_seq_add_stmt (body_p, g);
11427 g = gimple_build_label (lab2);
11428 gimple_seq_add_stmt (body_p, g);
11429 gimple_seq_add_seq (body_p, thrn2_list);
11430 g = gimple_build_label (lab3);
11431 gimple_seq_add_stmt (body_p, g);
11433 g = gimple_build_assign (ivar, size_zero_node);
11434 gimple_seq_add_stmt (body_p, g);
11435 gimple_seq_add_stmt (body_p, new_stmt);
11436 gimple_seq_add_seq (body_p, new_body);
11438 gimple_seq new_dlist = NULL;
11439 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11440 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11441 tree num_threadsm1 = create_tmp_var (integer_type_node);
11442 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11443 integer_minus_one_node);
11444 gimple_seq_add_stmt (&new_dlist, g);
11445 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11446 gimple_seq_add_stmt (&new_dlist, g);
11447 g = gimple_build_label (lab1);
11448 gimple_seq_add_stmt (&new_dlist, g);
11449 gimple_seq_add_seq (&new_dlist, last_list);
11450 g = gimple_build_label (lab2);
11451 gimple_seq_add_stmt (&new_dlist, g);
11452 gimple_seq_add_seq (&new_dlist, *dlist);
11453 *dlist = new_dlist;
11456 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11457 the addresses of variables to be made private at the surrounding
11458 parallelism level. Such functions appear in the gimple code stream in two
11459 forms, e.g. for a partitioned loop:
11461 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11462 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11463 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11464 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11466 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11467 not as part of a HEAD_MARK sequence:
11469 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11471 For such stand-alone appearances, the 3rd argument is always 0, denoting
11472 gang partitioning. */
11474 static gcall *
11475 lower_oacc_private_marker (omp_context *ctx)
11477 if (ctx->oacc_privatization_candidates.length () == 0)
11478 return NULL;
11480 auto_vec<tree, 5> args;
11482 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11483 args.quick_push (integer_zero_node);
11484 args.quick_push (integer_minus_one_node);
11486 int i;
11487 tree decl;
11488 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11490 for (omp_context *thisctx = ctx; thisctx; thisctx = thisctx->outer)
11492 tree inner_decl = maybe_lookup_decl (decl, thisctx);
11493 if (inner_decl)
11495 decl = inner_decl;
11496 break;
11499 gcc_checking_assert (decl);
11501 tree addr = build_fold_addr_expr (decl);
11502 args.safe_push (addr);
11505 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
11508 /* Lower code for an OMP loop directive. */
11510 static void
11511 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11513 tree *rhs_p, block;
11514 struct omp_for_data fd, *fdp = NULL;
11515 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
11516 gbind *new_stmt;
11517 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
11518 gimple_seq cnt_list = NULL, clist = NULL;
11519 gimple_seq oacc_head = NULL, oacc_tail = NULL;
11520 size_t i;
11522 push_gimplify_context ();
11524 if (is_gimple_omp_oacc (ctx->stmt))
11525 oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));
11527 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
11529 block = make_node (BLOCK);
11530 new_stmt = gimple_build_bind (NULL, NULL, block);
11531 /* Replace at gsi right away, so that 'stmt' is no member
11532 of a sequence anymore as we're going to add to a different
11533 one below. */
11534 gsi_replace (gsi_p, new_stmt, true);
11536 /* Move declaration of temporaries in the loop body before we make
11537 it go away. */
11538 omp_for_body = gimple_omp_body (stmt);
11539 if (!gimple_seq_empty_p (omp_for_body)
11540 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
11542 gbind *inner_bind
11543 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
11544 tree vars = gimple_bind_vars (inner_bind);
11545 if (is_gimple_omp_oacc (ctx->stmt))
11546 oacc_privatization_scan_decl_chain (ctx, vars);
11547 gimple_bind_append_vars (new_stmt, vars);
11548 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11549 keep them on the inner_bind and it's block. */
11550 gimple_bind_set_vars (inner_bind, NULL_TREE);
11551 if (gimple_bind_block (inner_bind))
11552 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
11555 if (gimple_omp_for_combined_into_p (stmt))
11557 omp_extract_for_data (stmt, &fd, NULL);
11558 fdp = &fd;
11560 /* We need two temporaries with fd.loop.v type (istart/iend)
11561 and then (fd.collapse - 1) temporaries with the same
11562 type for count2 ... countN-1 vars if not constant. */
11563 size_t count = 2;
11564 tree type = fd.iter_type;
11565 if (fd.collapse > 1
11566 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11567 count += fd.collapse - 1;
11568 size_t count2 = 0;
11569 tree type2 = NULL_TREE;
11570 bool taskreg_for
11571 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
11572 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
11573 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
11574 tree simtc = NULL;
11575 tree clauses = *pc;
11576 if (fd.collapse > 1
11577 && fd.non_rect
11578 && fd.last_nonrect == fd.first_nonrect + 1
11579 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11580 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
11581 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
11583 v = gimple_omp_for_index (stmt, fd.first_nonrect);
11584 type2 = TREE_TYPE (v);
11585 count++;
11586 count2 = 3;
11588 if (taskreg_for)
11589 outerc
11590 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
11591 OMP_CLAUSE__LOOPTEMP_);
11592 if (ctx->simt_stmt)
11593 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
11594 OMP_CLAUSE__LOOPTEMP_);
11595 for (i = 0; i < count + count2; i++)
11597 tree temp;
11598 if (taskreg_for)
11600 gcc_assert (outerc);
11601 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
11602 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
11603 OMP_CLAUSE__LOOPTEMP_);
11605 else
11607 /* If there are 2 adjacent SIMD stmts, one with _simt_
11608 clause, another without, make sure they have the same
11609 decls in _looptemp_ clauses, because the outer stmt
11610 they are combined into will look up just one inner_stmt. */
11611 if (ctx->simt_stmt)
11612 temp = OMP_CLAUSE_DECL (simtc);
11613 else
11614 temp = create_tmp_var (i >= count ? type2 : type);
11615 insert_decl_map (&ctx->outer->cb, temp, temp);
11617 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
11618 OMP_CLAUSE_DECL (*pc) = temp;
11619 pc = &OMP_CLAUSE_CHAIN (*pc);
11620 if (ctx->simt_stmt)
11621 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
11622 OMP_CLAUSE__LOOPTEMP_);
11624 *pc = clauses;
11627 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11628 dlist = NULL;
11629 body = NULL;
11630 tree rclauses
11631 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
11632 OMP_CLAUSE_REDUCTION);
11633 tree rtmp = NULL_TREE;
11634 if (rclauses)
11636 tree type = build_pointer_type (pointer_sized_int_node);
11637 tree temp = create_tmp_var (type);
11638 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
11639 OMP_CLAUSE_DECL (c) = temp;
11640 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
11641 gimple_omp_for_set_clauses (stmt, c);
11642 lower_omp_task_reductions (ctx, OMP_FOR,
11643 gimple_omp_for_clauses (stmt),
11644 &tred_ilist, &tred_dlist);
11645 rclauses = c;
11646 rtmp = make_ssa_name (type);
11647 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
11650 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
11651 ctx);
11653 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
11654 fdp);
11655 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
11656 gimple_omp_for_pre_body (stmt));
11658 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11660 gcall *private_marker = NULL;
11661 if (is_gimple_omp_oacc (ctx->stmt)
11662 && !gimple_seq_empty_p (omp_for_body))
11663 private_marker = lower_oacc_private_marker (ctx);
11665 /* Lower the header expressions. At this point, we can assume that
11666 the header is of the form:
11668 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11670 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11671 using the .omp_data_s mapping, if needed. */
11672 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
11674 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
11675 if (TREE_CODE (*rhs_p) == TREE_VEC)
11677 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11678 TREE_VEC_ELT (*rhs_p, 1)
11679 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11680 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11681 TREE_VEC_ELT (*rhs_p, 2)
11682 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11684 else if (!is_gimple_min_invariant (*rhs_p))
11685 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11686 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11687 recompute_tree_invariant_for_addr_expr (*rhs_p);
11689 rhs_p = gimple_omp_for_final_ptr (stmt, i);
11690 if (TREE_CODE (*rhs_p) == TREE_VEC)
11692 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11693 TREE_VEC_ELT (*rhs_p, 1)
11694 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11695 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11696 TREE_VEC_ELT (*rhs_p, 2)
11697 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11699 else if (!is_gimple_min_invariant (*rhs_p))
11700 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11701 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11702 recompute_tree_invariant_for_addr_expr (*rhs_p);
11704 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
11705 if (!is_gimple_min_invariant (*rhs_p))
11706 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11708 if (rclauses)
11709 gimple_seq_add_seq (&tred_ilist, cnt_list);
11710 else
11711 gimple_seq_add_seq (&body, cnt_list);
11713 /* Once lowered, extract the bounds and clauses. */
11714 omp_extract_for_data (stmt, &fd, NULL);
11716 if (is_gimple_omp_oacc (ctx->stmt)
11717 && !ctx_in_oacc_kernels_region (ctx))
11718 lower_oacc_head_tail (gimple_location (stmt),
11719 gimple_omp_for_clauses (stmt), private_marker,
11720 &oacc_head, &oacc_tail, ctx);
11722 /* Add OpenACC partitioning and reduction markers just before the loop. */
11723 if (oacc_head)
11724 gimple_seq_add_seq (&body, oacc_head);
11726 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
11728 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11729 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11730 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11731 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11733 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
11734 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
11735 OMP_CLAUSE_LINEAR_STEP (c)
11736 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
11737 ctx);
11740 if ((ctx->scan_inclusive || ctx->scan_exclusive)
11741 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11742 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
11743 else
11745 gimple_seq_add_stmt (&body, stmt);
11746 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
11749 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
11750 fd.loop.v));
11752 /* After the loop, add exit clauses. */
11753 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
11755 if (clist)
11757 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
11758 gcall *g = gimple_build_call (fndecl, 0);
11759 gimple_seq_add_stmt (&body, g);
11760 gimple_seq_add_seq (&body, clist);
11761 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
11762 g = gimple_build_call (fndecl, 0);
11763 gimple_seq_add_stmt (&body, g);
11766 if (ctx->cancellable)
11767 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
11769 gimple_seq_add_seq (&body, dlist);
11771 if (rclauses)
11773 gimple_seq_add_seq (&tred_ilist, body);
11774 body = tred_ilist;
11777 body = maybe_catch_exception (body);
11779 /* Region exit marker goes at the end of the loop body. */
11780 gimple *g = gimple_build_omp_return (fd.have_nowait);
11781 gimple_seq_add_stmt (&body, g);
11783 gimple_seq_add_seq (&body, tred_dlist);
11785 maybe_add_implicit_barrier_cancel (ctx, g, &body);
11787 if (rclauses)
11788 OMP_CLAUSE_DECL (rclauses) = rtmp;
11790 /* Add OpenACC joining and reduction markers just after the loop. */
11791 if (oacc_tail)
11792 gimple_seq_add_seq (&body, oacc_tail);
11794 pop_gimplify_context (new_stmt);
11796 gimple_bind_append_vars (new_stmt, ctx->block_vars);
11797 maybe_remove_omp_member_access_dummy_vars (new_stmt);
11798 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
11799 if (BLOCK_VARS (block))
11800 TREE_USED (block) = 1;
11802 gimple_bind_set_body (new_stmt, body);
11803 gimple_omp_set_body (stmt, NULL);
11804 gimple_omp_for_set_pre_body (stmt, NULL);
11807 /* Callback for walk_stmts. Check if the current statement only contains
11808 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11810 static tree
11811 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11812 bool *handled_ops_p,
11813 struct walk_stmt_info *wi)
11815 int *info = (int *) wi->info;
11816 gimple *stmt = gsi_stmt (*gsi_p);
11818 *handled_ops_p = true;
11819 switch (gimple_code (stmt))
11821 WALK_SUBSTMTS;
11823 case GIMPLE_DEBUG:
11824 break;
11825 case GIMPLE_OMP_FOR:
11826 case GIMPLE_OMP_SECTIONS:
11827 *info = *info == 0 ? 1 : -1;
11828 break;
11829 default:
11830 *info = -1;
11831 break;
11833 return NULL;
11836 struct omp_taskcopy_context
11838 /* This field must be at the beginning, as we do "inheritance": Some
11839 callback functions for tree-inline.c (e.g., omp_copy_decl)
11840 receive a copy_body_data pointer that is up-casted to an
11841 omp_context pointer. */
11842 copy_body_data cb;
11843 omp_context *ctx;
11846 static tree
11847 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11849 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11851 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11852 return create_tmp_var (TREE_TYPE (var));
11854 return var;
11857 static tree
11858 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11860 tree name, new_fields = NULL, type, f;
11862 type = lang_hooks.types.make_type (RECORD_TYPE);
11863 name = DECL_NAME (TYPE_NAME (orig_type));
11864 name = build_decl (gimple_location (tcctx->ctx->stmt),
11865 TYPE_DECL, name, type);
11866 TYPE_NAME (type) = name;
11868 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11870 tree new_f = copy_node (f);
11871 DECL_CONTEXT (new_f) = type;
11872 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11873 TREE_CHAIN (new_f) = new_fields;
11874 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11875 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11876 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11877 &tcctx->cb, NULL);
11878 new_fields = new_f;
11879 tcctx->cb.decl_map->put (f, new_f);
11881 TYPE_FIELDS (type) = nreverse (new_fields);
11882 layout_type (type);
11883 return type;
11886 /* Create task copyfn. */
11888 static void
11889 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
11891 struct function *child_cfun;
11892 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
11893 tree record_type, srecord_type, bind, list;
11894 bool record_needs_remap = false, srecord_needs_remap = false;
11895 splay_tree_node n;
11896 struct omp_taskcopy_context tcctx;
11897 location_t loc = gimple_location (task_stmt);
11898 size_t looptempno = 0;
11900 child_fn = gimple_omp_task_copy_fn (task_stmt);
11901 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
11902 gcc_assert (child_cfun->cfg == NULL);
11903 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
11905 /* Reset DECL_CONTEXT on function arguments. */
11906 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
11907 DECL_CONTEXT (t) = child_fn;
11909 /* Populate the function. */
11910 push_gimplify_context ();
11911 push_cfun (child_cfun);
11913 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
11914 TREE_SIDE_EFFECTS (bind) = 1;
11915 list = NULL;
11916 DECL_SAVED_TREE (child_fn) = bind;
11917 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
11919 /* Remap src and dst argument types if needed. */
11920 record_type = ctx->record_type;
11921 srecord_type = ctx->srecord_type;
11922 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
11923 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11925 record_needs_remap = true;
11926 break;
11928 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
11929 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11931 srecord_needs_remap = true;
11932 break;
11935 if (record_needs_remap || srecord_needs_remap)
11937 memset (&tcctx, '\0', sizeof (tcctx));
11938 tcctx.cb.src_fn = ctx->cb.src_fn;
11939 tcctx.cb.dst_fn = child_fn;
11940 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
11941 gcc_checking_assert (tcctx.cb.src_node);
11942 tcctx.cb.dst_node = tcctx.cb.src_node;
11943 tcctx.cb.src_cfun = ctx->cb.src_cfun;
11944 tcctx.cb.copy_decl = task_copyfn_copy_decl;
11945 tcctx.cb.eh_lp_nr = 0;
11946 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
11947 tcctx.cb.decl_map = new hash_map<tree, tree>;
11948 tcctx.ctx = ctx;
11950 if (record_needs_remap)
11951 record_type = task_copyfn_remap_type (&tcctx, record_type);
11952 if (srecord_needs_remap)
11953 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
11955 else
11956 tcctx.cb.decl_map = NULL;
11958 arg = DECL_ARGUMENTS (child_fn);
11959 TREE_TYPE (arg) = build_pointer_type (record_type);
11960 sarg = DECL_CHAIN (arg);
11961 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
11963 /* First pass: initialize temporaries used in record_type and srecord_type
11964 sizes and field offsets. */
11965 if (tcctx.cb.decl_map)
11966 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11967 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11969 tree *p;
11971 decl = OMP_CLAUSE_DECL (c);
11972 p = tcctx.cb.decl_map->get (decl);
11973 if (p == NULL)
11974 continue;
11975 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11976 sf = (tree) n->value;
11977 sf = *tcctx.cb.decl_map->get (sf);
11978 src = build_simple_mem_ref_loc (loc, sarg);
11979 src = omp_build_component_ref (src, sf);
11980 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
11981 append_to_statement_list (t, &list);
11984 /* Second pass: copy shared var pointers and copy construct non-VLA
11985 firstprivate vars. */
11986 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11987 switch (OMP_CLAUSE_CODE (c))
11989 splay_tree_key key;
11990 case OMP_CLAUSE_SHARED:
11991 decl = OMP_CLAUSE_DECL (c);
11992 key = (splay_tree_key) decl;
11993 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
11994 key = (splay_tree_key) &DECL_UID (decl);
11995 n = splay_tree_lookup (ctx->field_map, key);
11996 if (n == NULL)
11997 break;
11998 f = (tree) n->value;
11999 if (tcctx.cb.decl_map)
12000 f = *tcctx.cb.decl_map->get (f);
12001 n = splay_tree_lookup (ctx->sfield_map, key);
12002 sf = (tree) n->value;
12003 if (tcctx.cb.decl_map)
12004 sf = *tcctx.cb.decl_map->get (sf);
12005 src = build_simple_mem_ref_loc (loc, sarg);
12006 src = omp_build_component_ref (src, sf);
12007 dst = build_simple_mem_ref_loc (loc, arg);
12008 dst = omp_build_component_ref (dst, f);
12009 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12010 append_to_statement_list (t, &list);
12011 break;
12012 case OMP_CLAUSE_REDUCTION:
12013 case OMP_CLAUSE_IN_REDUCTION:
12014 decl = OMP_CLAUSE_DECL (c);
12015 if (TREE_CODE (decl) == MEM_REF)
12017 decl = TREE_OPERAND (decl, 0);
12018 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12019 decl = TREE_OPERAND (decl, 0);
12020 if (TREE_CODE (decl) == INDIRECT_REF
12021 || TREE_CODE (decl) == ADDR_EXPR)
12022 decl = TREE_OPERAND (decl, 0);
12024 key = (splay_tree_key) decl;
12025 n = splay_tree_lookup (ctx->field_map, key);
12026 if (n == NULL)
12027 break;
12028 f = (tree) n->value;
12029 if (tcctx.cb.decl_map)
12030 f = *tcctx.cb.decl_map->get (f);
12031 n = splay_tree_lookup (ctx->sfield_map, key);
12032 sf = (tree) n->value;
12033 if (tcctx.cb.decl_map)
12034 sf = *tcctx.cb.decl_map->get (sf);
12035 src = build_simple_mem_ref_loc (loc, sarg);
12036 src = omp_build_component_ref (src, sf);
12037 if (decl != OMP_CLAUSE_DECL (c)
12038 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12039 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12040 src = build_simple_mem_ref_loc (loc, src);
12041 dst = build_simple_mem_ref_loc (loc, arg);
12042 dst = omp_build_component_ref (dst, f);
12043 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12044 append_to_statement_list (t, &list);
12045 break;
12046 case OMP_CLAUSE__LOOPTEMP_:
12047 /* Fields for first two _looptemp_ clauses are initialized by
12048 GOMP_taskloop*, the rest are handled like firstprivate. */
12049 if (looptempno < 2)
12051 looptempno++;
12052 break;
12054 /* FALLTHRU */
12055 case OMP_CLAUSE__REDUCTEMP_:
12056 case OMP_CLAUSE_FIRSTPRIVATE:
12057 decl = OMP_CLAUSE_DECL (c);
12058 if (is_variable_sized (decl))
12059 break;
12060 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12061 if (n == NULL)
12062 break;
12063 f = (tree) n->value;
12064 if (tcctx.cb.decl_map)
12065 f = *tcctx.cb.decl_map->get (f);
12066 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12067 if (n != NULL)
12069 sf = (tree) n->value;
12070 if (tcctx.cb.decl_map)
12071 sf = *tcctx.cb.decl_map->get (sf);
12072 src = build_simple_mem_ref_loc (loc, sarg);
12073 src = omp_build_component_ref (src, sf);
12074 if (use_pointer_for_field (decl, NULL)
12075 || omp_privatize_by_reference (decl))
12076 src = build_simple_mem_ref_loc (loc, src);
12078 else
12079 src = decl;
12080 dst = build_simple_mem_ref_loc (loc, arg);
12081 dst = omp_build_component_ref (dst, f);
12082 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
12083 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12084 else
12086 if (ctx->allocate_map)
12087 if (tree *allocatorp = ctx->allocate_map->get (decl))
12089 tree allocator = *allocatorp;
12090 HOST_WIDE_INT ialign = 0;
12091 if (TREE_CODE (allocator) == TREE_LIST)
12093 ialign = tree_to_uhwi (TREE_VALUE (allocator));
12094 allocator = TREE_PURPOSE (allocator);
12096 if (TREE_CODE (allocator) != INTEGER_CST)
12098 n = splay_tree_lookup (ctx->sfield_map,
12099 (splay_tree_key) allocator);
12100 allocator = (tree) n->value;
12101 if (tcctx.cb.decl_map)
12102 allocator = *tcctx.cb.decl_map->get (allocator);
12103 tree a = build_simple_mem_ref_loc (loc, sarg);
12104 allocator = omp_build_component_ref (a, allocator);
12106 allocator = fold_convert (pointer_sized_int_node, allocator);
12107 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
12108 tree align = build_int_cst (size_type_node,
12109 MAX (ialign,
12110 DECL_ALIGN_UNIT (decl)));
12111 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
12112 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
12113 allocator);
12114 ptr = fold_convert (TREE_TYPE (dst), ptr);
12115 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
12116 append_to_statement_list (t, &list);
12117 dst = build_simple_mem_ref_loc (loc, dst);
12119 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12121 append_to_statement_list (t, &list);
12122 break;
12123 case OMP_CLAUSE_PRIVATE:
12124 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
12125 break;
12126 decl = OMP_CLAUSE_DECL (c);
12127 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12128 f = (tree) n->value;
12129 if (tcctx.cb.decl_map)
12130 f = *tcctx.cb.decl_map->get (f);
12131 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12132 if (n != NULL)
12134 sf = (tree) n->value;
12135 if (tcctx.cb.decl_map)
12136 sf = *tcctx.cb.decl_map->get (sf);
12137 src = build_simple_mem_ref_loc (loc, sarg);
12138 src = omp_build_component_ref (src, sf);
12139 if (use_pointer_for_field (decl, NULL))
12140 src = build_simple_mem_ref_loc (loc, src);
12142 else
12143 src = decl;
12144 dst = build_simple_mem_ref_loc (loc, arg);
12145 dst = omp_build_component_ref (dst, f);
12146 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12147 append_to_statement_list (t, &list);
12148 break;
12149 default:
12150 break;
12153 /* Last pass: handle VLA firstprivates. */
12154 if (tcctx.cb.decl_map)
12155 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12156 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12158 tree ind, ptr, df;
12160 decl = OMP_CLAUSE_DECL (c);
12161 if (!is_variable_sized (decl))
12162 continue;
12163 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12164 if (n == NULL)
12165 continue;
12166 f = (tree) n->value;
12167 f = *tcctx.cb.decl_map->get (f);
12168 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
12169 ind = DECL_VALUE_EXPR (decl);
12170 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
12171 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
12172 n = splay_tree_lookup (ctx->sfield_map,
12173 (splay_tree_key) TREE_OPERAND (ind, 0));
12174 sf = (tree) n->value;
12175 sf = *tcctx.cb.decl_map->get (sf);
12176 src = build_simple_mem_ref_loc (loc, sarg);
12177 src = omp_build_component_ref (src, sf);
12178 src = build_simple_mem_ref_loc (loc, src);
12179 dst = build_simple_mem_ref_loc (loc, arg);
12180 dst = omp_build_component_ref (dst, f);
12181 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12182 append_to_statement_list (t, &list);
12183 n = splay_tree_lookup (ctx->field_map,
12184 (splay_tree_key) TREE_OPERAND (ind, 0));
12185 df = (tree) n->value;
12186 df = *tcctx.cb.decl_map->get (df);
12187 ptr = build_simple_mem_ref_loc (loc, arg);
12188 ptr = omp_build_component_ref (ptr, df);
12189 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
12190 build_fold_addr_expr_loc (loc, dst));
12191 append_to_statement_list (t, &list);
12194 t = build1 (RETURN_EXPR, void_type_node, NULL);
12195 append_to_statement_list (t, &list);
12197 if (tcctx.cb.decl_map)
12198 delete tcctx.cb.decl_map;
12199 pop_gimplify_context (NULL);
12200 BIND_EXPR_BODY (bind) = list;
12201 pop_cfun ();
12204 static void
12205 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
12207 tree c, clauses;
12208 gimple *g;
12209 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
12211 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
12212 gcc_assert (clauses);
12213 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12214 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
12215 switch (OMP_CLAUSE_DEPEND_KIND (c))
12217 case OMP_CLAUSE_DEPEND_LAST:
12218 /* Lowering already done at gimplification. */
12219 return;
12220 case OMP_CLAUSE_DEPEND_IN:
12221 cnt[2]++;
12222 break;
12223 case OMP_CLAUSE_DEPEND_OUT:
12224 case OMP_CLAUSE_DEPEND_INOUT:
12225 cnt[0]++;
12226 break;
12227 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12228 cnt[1]++;
12229 break;
12230 case OMP_CLAUSE_DEPEND_DEPOBJ:
12231 cnt[3]++;
12232 break;
12233 case OMP_CLAUSE_DEPEND_SOURCE:
12234 case OMP_CLAUSE_DEPEND_SINK:
12235 /* FALLTHRU */
12236 default:
12237 gcc_unreachable ();
12239 if (cnt[1] || cnt[3])
12240 idx = 5;
12241 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
12242 tree type = build_array_type_nelts (ptr_type_node, total + idx);
12243 tree array = create_tmp_var (type);
12244 TREE_ADDRESSABLE (array) = 1;
12245 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12246 NULL_TREE);
12247 if (idx == 5)
12249 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12250 gimple_seq_add_stmt (iseq, g);
12251 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12252 NULL_TREE);
12254 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12255 gimple_seq_add_stmt (iseq, g);
12256 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12258 r = build4 (ARRAY_REF, ptr_type_node, array,
12259 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12260 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12261 gimple_seq_add_stmt (iseq, g);
12263 for (i = 0; i < 4; i++)
12265 if (cnt[i] == 0)
12266 continue;
12267 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12268 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12269 continue;
12270 else
12272 switch (OMP_CLAUSE_DEPEND_KIND (c))
12274 case OMP_CLAUSE_DEPEND_IN:
12275 if (i != 2)
12276 continue;
12277 break;
12278 case OMP_CLAUSE_DEPEND_OUT:
12279 case OMP_CLAUSE_DEPEND_INOUT:
12280 if (i != 0)
12281 continue;
12282 break;
12283 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12284 if (i != 1)
12285 continue;
12286 break;
12287 case OMP_CLAUSE_DEPEND_DEPOBJ:
12288 if (i != 3)
12289 continue;
12290 break;
12291 default:
12292 gcc_unreachable ();
12294 tree t = OMP_CLAUSE_DECL (c);
12295 t = fold_convert (ptr_type_node, t);
12296 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12297 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12298 NULL_TREE, NULL_TREE);
12299 g = gimple_build_assign (r, t);
12300 gimple_seq_add_stmt (iseq, g);
12303 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12304 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12305 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12306 OMP_CLAUSE_CHAIN (c) = *pclauses;
12307 *pclauses = c;
12308 tree clobber = build_clobber (type);
12309 g = gimple_build_assign (array, clobber);
12310 gimple_seq_add_stmt (oseq, g);
12313 /* Lower the OpenMP parallel or task directive in the current statement
12314 in GSI_P. CTX holds context information for the directive. */
12316 static void
12317 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12319 tree clauses;
12320 tree child_fn, t;
12321 gimple *stmt = gsi_stmt (*gsi_p);
12322 gbind *par_bind, *bind, *dep_bind = NULL;
12323 gimple_seq par_body;
12324 location_t loc = gimple_location (stmt);
12326 clauses = gimple_omp_taskreg_clauses (stmt);
12327 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12328 && gimple_omp_task_taskwait_p (stmt))
12330 par_bind = NULL;
12331 par_body = NULL;
12333 else
12335 par_bind
12336 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
12337 par_body = gimple_bind_body (par_bind);
12339 child_fn = ctx->cb.dst_fn;
12340 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
12341 && !gimple_omp_parallel_combined_p (stmt))
12343 struct walk_stmt_info wi;
12344 int ws_num = 0;
12346 memset (&wi, 0, sizeof (wi));
12347 wi.info = &ws_num;
12348 wi.val_only = true;
12349 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12350 if (ws_num == 1)
12351 gimple_omp_parallel_set_combined_p (stmt, true);
12353 gimple_seq dep_ilist = NULL;
12354 gimple_seq dep_olist = NULL;
12355 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12356 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12358 push_gimplify_context ();
12359 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12360 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
12361 &dep_ilist, &dep_olist);
12364 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12365 && gimple_omp_task_taskwait_p (stmt))
12367 if (dep_bind)
12369 gsi_replace (gsi_p, dep_bind, true);
12370 gimple_bind_add_seq (dep_bind, dep_ilist);
12371 gimple_bind_add_stmt (dep_bind, stmt);
12372 gimple_bind_add_seq (dep_bind, dep_olist);
12373 pop_gimplify_context (dep_bind);
12375 return;
12378 if (ctx->srecord_type)
12379 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
12381 gimple_seq tskred_ilist = NULL;
12382 gimple_seq tskred_olist = NULL;
12383 if ((is_task_ctx (ctx)
12384 && gimple_omp_task_taskloop_p (ctx->stmt)
12385 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
12386 OMP_CLAUSE_REDUCTION))
12387 || (is_parallel_ctx (ctx)
12388 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
12389 OMP_CLAUSE__REDUCTEMP_)))
12391 if (dep_bind == NULL)
12393 push_gimplify_context ();
12394 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12396 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
12397 : OMP_PARALLEL,
12398 gimple_omp_taskreg_clauses (ctx->stmt),
12399 &tskred_ilist, &tskred_olist);
12402 push_gimplify_context ();
12404 gimple_seq par_olist = NULL;
12405 gimple_seq par_ilist = NULL;
12406 gimple_seq par_rlist = NULL;
12407 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
12408 lower_omp (&par_body, ctx);
12409 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
12410 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
12412 /* Declare all the variables created by mapping and the variables
12413 declared in the scope of the parallel body. */
12414 record_vars_into (ctx->block_vars, child_fn);
12415 maybe_remove_omp_member_access_dummy_vars (par_bind);
12416 record_vars_into (gimple_bind_vars (par_bind), child_fn);
12418 if (ctx->record_type)
12420 ctx->sender_decl
12421 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12422 : ctx->record_type, ".omp_data_o");
12423 DECL_NAMELESS (ctx->sender_decl) = 1;
12424 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12425 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
12428 gimple_seq olist = NULL;
12429 gimple_seq ilist = NULL;
12430 lower_send_clauses (clauses, &ilist, &olist, ctx);
12431 lower_send_shared_vars (&ilist, &olist, ctx);
12433 if (ctx->record_type)
12435 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12436 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12437 clobber));
12440 /* Once all the expansions are done, sequence all the different
12441 fragments inside gimple_omp_body. */
12443 gimple_seq new_body = NULL;
12445 if (ctx->record_type)
12447 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12448 /* fixup_child_record_type might have changed receiver_decl's type. */
12449 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12450 gimple_seq_add_stmt (&new_body,
12451 gimple_build_assign (ctx->receiver_decl, t));
12454 gimple_seq_add_seq (&new_body, par_ilist);
12455 gimple_seq_add_seq (&new_body, par_body);
12456 gimple_seq_add_seq (&new_body, par_rlist);
12457 if (ctx->cancellable)
12458 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
12459 gimple_seq_add_seq (&new_body, par_olist);
12460 new_body = maybe_catch_exception (new_body);
12461 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
12462 gimple_seq_add_stmt (&new_body,
12463 gimple_build_omp_continue (integer_zero_node,
12464 integer_zero_node));
12465 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12466 gimple_omp_set_body (stmt, new_body);
12468 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
12469 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12470 else
12471 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
12472 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12473 gimple_bind_add_seq (bind, ilist);
12474 gimple_bind_add_stmt (bind, stmt);
12475 gimple_bind_add_seq (bind, olist);
12477 pop_gimplify_context (NULL);
12479 if (dep_bind)
12481 gimple_bind_add_seq (dep_bind, dep_ilist);
12482 gimple_bind_add_seq (dep_bind, tskred_ilist);
12483 gimple_bind_add_stmt (dep_bind, bind);
12484 gimple_bind_add_seq (dep_bind, tskred_olist);
12485 gimple_bind_add_seq (dep_bind, dep_olist);
12486 pop_gimplify_context (dep_bind);
12490 /* Lower the GIMPLE_OMP_TARGET in the current statement
12491 in GSI_P. CTX holds context information for the directive. */
12493 static void
12494 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12496 tree clauses;
12497 tree child_fn, t, c;
12498 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12499 gbind *tgt_bind, *bind, *dep_bind = NULL;
12500 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12501 location_t loc = gimple_location (stmt);
12502 bool offloaded, data_region;
12503 unsigned int map_cnt = 0;
12504 tree in_reduction_clauses = NULL_TREE;
12506 offloaded = is_gimple_omp_offloaded (stmt);
12507 switch (gimple_omp_target_kind (stmt))
12509 case GF_OMP_TARGET_KIND_REGION:
12510 tree *p, *q;
12511 q = &in_reduction_clauses;
12512 for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
12513 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12515 *q = *p;
12516 q = &OMP_CLAUSE_CHAIN (*q);
12517 *p = OMP_CLAUSE_CHAIN (*p);
12519 else
12520 p = &OMP_CLAUSE_CHAIN (*p);
12521 *q = NULL_TREE;
12522 *p = in_reduction_clauses;
12523 /* FALLTHRU */
12524 case GF_OMP_TARGET_KIND_UPDATE:
12525 case GF_OMP_TARGET_KIND_ENTER_DATA:
12526 case GF_OMP_TARGET_KIND_EXIT_DATA:
12527 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12528 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12529 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12530 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12531 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12532 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12533 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12534 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12535 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12536 data_region = false;
12537 break;
12538 case GF_OMP_TARGET_KIND_DATA:
12539 case GF_OMP_TARGET_KIND_OACC_DATA:
12540 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12541 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12542 data_region = true;
12543 break;
12544 default:
12545 gcc_unreachable ();
12548 clauses = gimple_omp_target_clauses (stmt);
12550 gimple_seq dep_ilist = NULL;
12551 gimple_seq dep_olist = NULL;
12552 bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
12553 if (has_depend || in_reduction_clauses)
12555 push_gimplify_context ();
12556 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12557 if (has_depend)
12558 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12559 &dep_ilist, &dep_olist);
12560 if (in_reduction_clauses)
12561 lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
12562 ctx, NULL);
12565 tgt_bind = NULL;
12566 tgt_body = NULL;
12567 if (offloaded)
12569 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12570 tgt_body = gimple_bind_body (tgt_bind);
12572 else if (data_region)
12573 tgt_body = gimple_omp_body (stmt);
12574 child_fn = ctx->cb.dst_fn;
12576 push_gimplify_context ();
12577 fplist = NULL;
12579 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12580 switch (OMP_CLAUSE_CODE (c))
12582 tree var, x;
12584 default:
12585 break;
12586 case OMP_CLAUSE_MAP:
12587 #if CHECKING_P
12588 /* First check what we're prepared to handle in the following. */
12589 switch (OMP_CLAUSE_MAP_KIND (c))
12591 case GOMP_MAP_ALLOC:
12592 case GOMP_MAP_TO:
12593 case GOMP_MAP_FROM:
12594 case GOMP_MAP_TOFROM:
12595 case GOMP_MAP_POINTER:
12596 case GOMP_MAP_TO_PSET:
12597 case GOMP_MAP_DELETE:
12598 case GOMP_MAP_RELEASE:
12599 case GOMP_MAP_ALWAYS_TO:
12600 case GOMP_MAP_ALWAYS_FROM:
12601 case GOMP_MAP_ALWAYS_TOFROM:
12602 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12603 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12604 case GOMP_MAP_STRUCT:
12605 case GOMP_MAP_ALWAYS_POINTER:
12606 case GOMP_MAP_ATTACH:
12607 case GOMP_MAP_DETACH:
12608 break;
12609 case GOMP_MAP_IF_PRESENT:
12610 case GOMP_MAP_FORCE_ALLOC:
12611 case GOMP_MAP_FORCE_TO:
12612 case GOMP_MAP_FORCE_FROM:
12613 case GOMP_MAP_FORCE_TOFROM:
12614 case GOMP_MAP_FORCE_PRESENT:
12615 case GOMP_MAP_FORCE_DEVICEPTR:
12616 case GOMP_MAP_DEVICE_RESIDENT:
12617 case GOMP_MAP_LINK:
12618 case GOMP_MAP_FORCE_DETACH:
12619 gcc_assert (is_gimple_omp_oacc (stmt));
12620 break;
12621 default:
12622 gcc_unreachable ();
12624 #endif
12625 /* FALLTHRU */
12626 case OMP_CLAUSE_TO:
12627 case OMP_CLAUSE_FROM:
12628 oacc_firstprivate:
12629 var = OMP_CLAUSE_DECL (c);
12630 if (!DECL_P (var))
12632 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12633 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12634 && (OMP_CLAUSE_MAP_KIND (c)
12635 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12636 map_cnt++;
12637 continue;
12640 if (DECL_SIZE (var)
12641 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12643 tree var2 = DECL_VALUE_EXPR (var);
12644 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12645 var2 = TREE_OPERAND (var2, 0);
12646 gcc_assert (DECL_P (var2));
12647 var = var2;
12650 if (offloaded
12651 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12652 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12653 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12655 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12657 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12658 && varpool_node::get_create (var)->offloadable)
12659 continue;
12661 tree type = build_pointer_type (TREE_TYPE (var));
12662 tree new_var = lookup_decl (var, ctx);
12663 x = create_tmp_var_raw (type, get_name (new_var));
12664 gimple_add_tmp_var (x);
12665 x = build_simple_mem_ref (x);
12666 SET_DECL_VALUE_EXPR (new_var, x);
12667 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12669 continue;
12672 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12673 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12674 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12675 && is_omp_target (stmt))
12677 gcc_assert (maybe_lookup_field (c, ctx));
12678 map_cnt++;
12679 continue;
12682 if (!maybe_lookup_field (var, ctx))
12683 continue;
12685 /* Don't remap compute constructs' reduction variables, because the
12686 intermediate result must be local to each gang. */
12687 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12688 && is_gimple_omp_oacc (ctx->stmt)
12689 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12691 x = build_receiver_ref (var, true, ctx);
12692 tree new_var = lookup_decl (var, ctx);
12694 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12695 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12696 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12697 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12698 x = build_simple_mem_ref (x);
12699 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12701 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12702 if (omp_privatize_by_reference (new_var)
12703 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12704 || DECL_BY_REFERENCE (var)))
12706 /* Create a local object to hold the instance
12707 value. */
12708 tree type = TREE_TYPE (TREE_TYPE (new_var));
12709 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12710 tree inst = create_tmp_var (type, id);
12711 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12712 x = build_fold_addr_expr (inst);
12714 gimplify_assign (new_var, x, &fplist);
12716 else if (DECL_P (new_var))
12718 SET_DECL_VALUE_EXPR (new_var, x);
12719 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12721 else
12722 gcc_unreachable ();
12724 map_cnt++;
12725 break;
12727 case OMP_CLAUSE_FIRSTPRIVATE:
12728 gcc_checking_assert (offloaded);
12729 if (is_gimple_omp_oacc (ctx->stmt))
12731 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12732 gcc_checking_assert (!is_oacc_kernels (ctx));
12733 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12734 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12736 goto oacc_firstprivate;
12738 map_cnt++;
12739 var = OMP_CLAUSE_DECL (c);
12740 if (!omp_privatize_by_reference (var)
12741 && !is_gimple_reg_type (TREE_TYPE (var)))
12743 tree new_var = lookup_decl (var, ctx);
12744 if (is_variable_sized (var))
12746 tree pvar = DECL_VALUE_EXPR (var);
12747 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12748 pvar = TREE_OPERAND (pvar, 0);
12749 gcc_assert (DECL_P (pvar));
12750 tree new_pvar = lookup_decl (pvar, ctx);
12751 x = build_fold_indirect_ref (new_pvar);
12752 TREE_THIS_NOTRAP (x) = 1;
12754 else
12755 x = build_receiver_ref (var, true, ctx);
12756 SET_DECL_VALUE_EXPR (new_var, x);
12757 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12759 break;
12761 case OMP_CLAUSE_PRIVATE:
12762 gcc_checking_assert (offloaded);
12763 if (is_gimple_omp_oacc (ctx->stmt))
12765 /* No 'private' clauses on OpenACC 'kernels'. */
12766 gcc_checking_assert (!is_oacc_kernels (ctx));
12767 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12768 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12770 break;
12772 var = OMP_CLAUSE_DECL (c);
12773 if (is_variable_sized (var))
12775 tree new_var = lookup_decl (var, ctx);
12776 tree pvar = DECL_VALUE_EXPR (var);
12777 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12778 pvar = TREE_OPERAND (pvar, 0);
12779 gcc_assert (DECL_P (pvar));
12780 tree new_pvar = lookup_decl (pvar, ctx);
12781 x = build_fold_indirect_ref (new_pvar);
12782 TREE_THIS_NOTRAP (x) = 1;
12783 SET_DECL_VALUE_EXPR (new_var, x);
12784 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12786 break;
12788 case OMP_CLAUSE_USE_DEVICE_PTR:
12789 case OMP_CLAUSE_USE_DEVICE_ADDR:
12790 case OMP_CLAUSE_IS_DEVICE_PTR:
12791 var = OMP_CLAUSE_DECL (c);
12792 map_cnt++;
12793 if (is_variable_sized (var))
12795 tree new_var = lookup_decl (var, ctx);
12796 tree pvar = DECL_VALUE_EXPR (var);
12797 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12798 pvar = TREE_OPERAND (pvar, 0);
12799 gcc_assert (DECL_P (pvar));
12800 tree new_pvar = lookup_decl (pvar, ctx);
12801 x = build_fold_indirect_ref (new_pvar);
12802 TREE_THIS_NOTRAP (x) = 1;
12803 SET_DECL_VALUE_EXPR (new_var, x);
12804 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12806 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12807 && !omp_privatize_by_reference (var)
12808 && !omp_is_allocatable_or_ptr (var)
12809 && !lang_hooks.decls.omp_array_data (var, true))
12810 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12812 tree new_var = lookup_decl (var, ctx);
12813 tree type = build_pointer_type (TREE_TYPE (var));
12814 x = create_tmp_var_raw (type, get_name (new_var));
12815 gimple_add_tmp_var (x);
12816 x = build_simple_mem_ref (x);
12817 SET_DECL_VALUE_EXPR (new_var, x);
12818 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12820 else
12822 tree new_var = lookup_decl (var, ctx);
12823 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12824 gimple_add_tmp_var (x);
12825 SET_DECL_VALUE_EXPR (new_var, x);
12826 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12828 break;
12831 if (offloaded)
12833 target_nesting_level++;
12834 lower_omp (&tgt_body, ctx);
12835 target_nesting_level--;
12837 else if (data_region)
12838 lower_omp (&tgt_body, ctx);
12840 if (offloaded)
12842 /* Declare all the variables created by mapping and the variables
12843 declared in the scope of the target body. */
12844 record_vars_into (ctx->block_vars, child_fn);
12845 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
12846 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
12849 olist = NULL;
12850 ilist = NULL;
12851 if (ctx->record_type)
12853 ctx->sender_decl
12854 = create_tmp_var (ctx->record_type, ".omp_data_arr");
12855 DECL_NAMELESS (ctx->sender_decl) = 1;
12856 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12857 t = make_tree_vec (3);
12858 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
12859 TREE_VEC_ELT (t, 1)
12860 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
12861 ".omp_data_sizes");
12862 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
12863 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
12864 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
12865 tree tkind_type = short_unsigned_type_node;
12866 int talign_shift = 8;
12867 TREE_VEC_ELT (t, 2)
12868 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
12869 ".omp_data_kinds");
12870 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
12871 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
12872 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
12873 gimple_omp_target_set_data_arg (stmt, t);
12875 vec<constructor_elt, va_gc> *vsize;
12876 vec<constructor_elt, va_gc> *vkind;
12877 vec_alloc (vsize, map_cnt);
12878 vec_alloc (vkind, map_cnt);
12879 unsigned int map_idx = 0;
12881 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12882 switch (OMP_CLAUSE_CODE (c))
12884 tree ovar, nc, s, purpose, var, x, type;
12885 unsigned int talign;
12887 default:
12888 break;
12890 case OMP_CLAUSE_MAP:
12891 case OMP_CLAUSE_TO:
12892 case OMP_CLAUSE_FROM:
12893 oacc_firstprivate_map:
12894 nc = c;
12895 ovar = OMP_CLAUSE_DECL (c);
12896 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12897 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12898 || (OMP_CLAUSE_MAP_KIND (c)
12899 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12900 break;
12901 if (!DECL_P (ovar))
12903 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12904 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
12906 nc = OMP_CLAUSE_CHAIN (c);
12907 gcc_checking_assert (OMP_CLAUSE_DECL (nc)
12908 == get_base_address (ovar));
12909 ovar = OMP_CLAUSE_DECL (nc);
12911 else
12913 tree x = build_sender_ref (ovar, ctx);
12914 tree v = ovar;
12915 if (in_reduction_clauses
12916 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12917 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
12919 v = unshare_expr (v);
12920 tree *p = &v;
12921 while (handled_component_p (*p)
12922 || TREE_CODE (*p) == INDIRECT_REF
12923 || TREE_CODE (*p) == ADDR_EXPR
12924 || TREE_CODE (*p) == MEM_REF
12925 || TREE_CODE (*p) == NON_LVALUE_EXPR)
12926 p = &TREE_OPERAND (*p, 0);
12927 tree d = *p;
12928 if (is_variable_sized (d))
12930 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
12931 d = DECL_VALUE_EXPR (d);
12932 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
12933 d = TREE_OPERAND (d, 0);
12934 gcc_assert (DECL_P (d));
12936 splay_tree_key key
12937 = (splay_tree_key) &DECL_CONTEXT (d);
12938 tree nd = (tree) splay_tree_lookup (ctx->field_map,
12939 key)->value;
12940 if (d == *p)
12941 *p = nd;
12942 else
12943 *p = build_fold_indirect_ref (nd);
12945 v = build_fold_addr_expr_with_type (v, ptr_type_node);
12946 gimplify_assign (x, v, &ilist);
12947 nc = NULL_TREE;
12950 else
12952 if (DECL_SIZE (ovar)
12953 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
12955 tree ovar2 = DECL_VALUE_EXPR (ovar);
12956 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
12957 ovar2 = TREE_OPERAND (ovar2, 0);
12958 gcc_assert (DECL_P (ovar2));
12959 ovar = ovar2;
12961 if (!maybe_lookup_field (ovar, ctx)
12962 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12963 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12964 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
12965 continue;
12968 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
12969 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
12970 talign = DECL_ALIGN_UNIT (ovar);
12972 var = NULL_TREE;
12973 if (nc)
12975 if (in_reduction_clauses
12976 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12977 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
12979 tree d = ovar;
12980 if (is_variable_sized (d))
12982 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
12983 d = DECL_VALUE_EXPR (d);
12984 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
12985 d = TREE_OPERAND (d, 0);
12986 gcc_assert (DECL_P (d));
12988 splay_tree_key key
12989 = (splay_tree_key) &DECL_CONTEXT (d);
12990 tree nd = (tree) splay_tree_lookup (ctx->field_map,
12991 key)->value;
12992 if (d == ovar)
12993 var = nd;
12994 else
12995 var = build_fold_indirect_ref (nd);
12997 else
12998 var = lookup_decl_in_outer_ctx (ovar, ctx);
13000 if (nc
13001 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13002 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13003 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
13004 && is_omp_target (stmt))
13006 x = build_sender_ref (c, ctx);
13007 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
13009 else if (nc)
13011 x = build_sender_ref (ovar, ctx);
13013 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13014 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
13015 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
13016 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
13018 gcc_assert (offloaded);
13019 tree avar
13020 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
13021 mark_addressable (avar);
13022 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
13023 talign = DECL_ALIGN_UNIT (avar);
13024 avar = build_fold_addr_expr (avar);
13025 gimplify_assign (x, avar, &ilist);
13027 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13029 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
13030 if (!omp_privatize_by_reference (var))
13032 if (is_gimple_reg (var)
13033 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13034 suppress_warning (var);
13035 var = build_fold_addr_expr (var);
13037 else
13038 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13039 gimplify_assign (x, var, &ilist);
13041 else if (is_gimple_reg (var))
13043 gcc_assert (offloaded);
13044 tree avar = create_tmp_var (TREE_TYPE (var));
13045 mark_addressable (avar);
13046 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
13047 if (GOMP_MAP_COPY_TO_P (map_kind)
13048 || map_kind == GOMP_MAP_POINTER
13049 || map_kind == GOMP_MAP_TO_PSET
13050 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13052 /* If we need to initialize a temporary
13053 with VAR because it is not addressable, and
13054 the variable hasn't been initialized yet, then
13055 we'll get a warning for the store to avar.
13056 Don't warn in that case, the mapping might
13057 be implicit. */
13058 suppress_warning (var, OPT_Wuninitialized);
13059 gimplify_assign (avar, var, &ilist);
13061 avar = build_fold_addr_expr (avar);
13062 gimplify_assign (x, avar, &ilist);
13063 if ((GOMP_MAP_COPY_FROM_P (map_kind)
13064 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13065 && !TYPE_READONLY (TREE_TYPE (var)))
13067 x = unshare_expr (x);
13068 x = build_simple_mem_ref (x);
13069 gimplify_assign (var, x, &olist);
13072 else
13074 /* While MAP is handled explicitly by the FE,
13075 for 'target update', only the identified is passed. */
13076 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
13077 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
13078 && (omp_is_allocatable_or_ptr (var)
13079 && omp_check_optional_argument (var, false)))
13080 var = build_fold_indirect_ref (var);
13081 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
13082 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
13083 || (!omp_is_allocatable_or_ptr (var)
13084 && !omp_check_optional_argument (var, false)))
13085 var = build_fold_addr_expr (var);
13086 gimplify_assign (x, var, &ilist);
13089 s = NULL_TREE;
13090 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13092 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13093 s = TREE_TYPE (ovar);
13094 if (TREE_CODE (s) == REFERENCE_TYPE
13095 || omp_check_optional_argument (ovar, false))
13096 s = TREE_TYPE (s);
13097 s = TYPE_SIZE_UNIT (s);
13099 else
13100 s = OMP_CLAUSE_SIZE (c);
13101 if (s == NULL_TREE)
13102 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13103 s = fold_convert (size_type_node, s);
13104 purpose = size_int (map_idx++);
13105 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13106 if (TREE_CODE (s) != INTEGER_CST)
13107 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13109 unsigned HOST_WIDE_INT tkind, tkind_zero;
13110 switch (OMP_CLAUSE_CODE (c))
13112 case OMP_CLAUSE_MAP:
13113 tkind = OMP_CLAUSE_MAP_KIND (c);
13114 tkind_zero = tkind;
13115 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
13116 switch (tkind)
13118 case GOMP_MAP_ALLOC:
13119 case GOMP_MAP_IF_PRESENT:
13120 case GOMP_MAP_TO:
13121 case GOMP_MAP_FROM:
13122 case GOMP_MAP_TOFROM:
13123 case GOMP_MAP_ALWAYS_TO:
13124 case GOMP_MAP_ALWAYS_FROM:
13125 case GOMP_MAP_ALWAYS_TOFROM:
13126 case GOMP_MAP_RELEASE:
13127 case GOMP_MAP_FORCE_TO:
13128 case GOMP_MAP_FORCE_FROM:
13129 case GOMP_MAP_FORCE_TOFROM:
13130 case GOMP_MAP_FORCE_PRESENT:
13131 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
13132 break;
13133 case GOMP_MAP_DELETE:
13134 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
13135 default:
13136 break;
13138 if (tkind_zero != tkind)
13140 if (integer_zerop (s))
13141 tkind = tkind_zero;
13142 else if (integer_nonzerop (s))
13143 tkind_zero = tkind;
13145 break;
13146 case OMP_CLAUSE_FIRSTPRIVATE:
13147 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13148 tkind = GOMP_MAP_TO;
13149 tkind_zero = tkind;
13150 break;
13151 case OMP_CLAUSE_TO:
13152 tkind = GOMP_MAP_TO;
13153 tkind_zero = tkind;
13154 break;
13155 case OMP_CLAUSE_FROM:
13156 tkind = GOMP_MAP_FROM;
13157 tkind_zero = tkind;
13158 break;
13159 default:
13160 gcc_unreachable ();
13162 gcc_checking_assert (tkind
13163 < (HOST_WIDE_INT_C (1U) << talign_shift));
13164 gcc_checking_assert (tkind_zero
13165 < (HOST_WIDE_INT_C (1U) << talign_shift));
13166 talign = ceil_log2 (talign);
13167 tkind |= talign << talign_shift;
13168 tkind_zero |= talign << talign_shift;
13169 gcc_checking_assert (tkind
13170 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13171 gcc_checking_assert (tkind_zero
13172 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13173 if (tkind == tkind_zero)
13174 x = build_int_cstu (tkind_type, tkind);
13175 else
13177 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
13178 x = build3 (COND_EXPR, tkind_type,
13179 fold_build2 (EQ_EXPR, boolean_type_node,
13180 unshare_expr (s), size_zero_node),
13181 build_int_cstu (tkind_type, tkind_zero),
13182 build_int_cstu (tkind_type, tkind));
13184 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
13185 if (nc && nc != c)
13186 c = nc;
13187 break;
13189 case OMP_CLAUSE_FIRSTPRIVATE:
13190 if (is_gimple_omp_oacc (ctx->stmt))
13191 goto oacc_firstprivate_map;
13192 ovar = OMP_CLAUSE_DECL (c);
13193 if (omp_privatize_by_reference (ovar))
13194 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13195 else
13196 talign = DECL_ALIGN_UNIT (ovar);
13197 var = lookup_decl_in_outer_ctx (ovar, ctx);
13198 x = build_sender_ref (ovar, ctx);
13199 tkind = GOMP_MAP_FIRSTPRIVATE;
13200 type = TREE_TYPE (ovar);
13201 if (omp_privatize_by_reference (ovar))
13202 type = TREE_TYPE (type);
13203 if ((INTEGRAL_TYPE_P (type)
13204 && TYPE_PRECISION (type) <= POINTER_SIZE)
13205 || TREE_CODE (type) == POINTER_TYPE)
13207 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13208 tree t = var;
13209 if (omp_privatize_by_reference (var))
13210 t = build_simple_mem_ref (var);
13211 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13212 suppress_warning (var);
13213 if (TREE_CODE (type) != POINTER_TYPE)
13214 t = fold_convert (pointer_sized_int_node, t);
13215 t = fold_convert (TREE_TYPE (x), t);
13216 gimplify_assign (x, t, &ilist);
13218 else if (omp_privatize_by_reference (var))
13219 gimplify_assign (x, var, &ilist);
13220 else if (is_gimple_reg (var))
13222 tree avar = create_tmp_var (TREE_TYPE (var));
13223 mark_addressable (avar);
13224 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13225 suppress_warning (var);
13226 gimplify_assign (avar, var, &ilist);
13227 avar = build_fold_addr_expr (avar);
13228 gimplify_assign (x, avar, &ilist);
13230 else
13232 var = build_fold_addr_expr (var);
13233 gimplify_assign (x, var, &ilist);
13235 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13236 s = size_int (0);
13237 else if (omp_privatize_by_reference (ovar))
13238 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13239 else
13240 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13241 s = fold_convert (size_type_node, s);
13242 purpose = size_int (map_idx++);
13243 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13244 if (TREE_CODE (s) != INTEGER_CST)
13245 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13247 gcc_checking_assert (tkind
13248 < (HOST_WIDE_INT_C (1U) << talign_shift));
13249 talign = ceil_log2 (talign);
13250 tkind |= talign << talign_shift;
13251 gcc_checking_assert (tkind
13252 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13253 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13254 build_int_cstu (tkind_type, tkind));
13255 break;
13257 case OMP_CLAUSE_USE_DEVICE_PTR:
13258 case OMP_CLAUSE_USE_DEVICE_ADDR:
13259 case OMP_CLAUSE_IS_DEVICE_PTR:
13260 ovar = OMP_CLAUSE_DECL (c);
13261 var = lookup_decl_in_outer_ctx (ovar, ctx);
13263 if (lang_hooks.decls.omp_array_data (ovar, true))
13265 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13266 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13267 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13269 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13271 tkind = GOMP_MAP_USE_DEVICE_PTR;
13272 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13274 else
13276 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13277 x = build_sender_ref (ovar, ctx);
13280 if (is_gimple_omp_oacc (ctx->stmt))
13282 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13284 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13285 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13288 type = TREE_TYPE (ovar);
13289 if (lang_hooks.decls.omp_array_data (ovar, true))
13290 var = lang_hooks.decls.omp_array_data (ovar, false);
13291 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13292 && !omp_privatize_by_reference (ovar)
13293 && !omp_is_allocatable_or_ptr (ovar))
13294 || TREE_CODE (type) == ARRAY_TYPE)
13295 var = build_fold_addr_expr (var);
13296 else
13298 if (omp_privatize_by_reference (ovar)
13299 || omp_check_optional_argument (ovar, false)
13300 || omp_is_allocatable_or_ptr (ovar))
13302 type = TREE_TYPE (type);
13303 if (POINTER_TYPE_P (type)
13304 && TREE_CODE (type) != ARRAY_TYPE
13305 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13306 && !omp_is_allocatable_or_ptr (ovar))
13307 || (omp_privatize_by_reference (ovar)
13308 && omp_is_allocatable_or_ptr (ovar))))
13309 var = build_simple_mem_ref (var);
13310 var = fold_convert (TREE_TYPE (x), var);
13313 tree present;
13314 present = omp_check_optional_argument (ovar, true);
13315 if (present)
13317 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13318 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13319 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13320 tree new_x = unshare_expr (x);
13321 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13322 fb_rvalue);
13323 gcond *cond = gimple_build_cond_from_tree (present,
13324 notnull_label,
13325 null_label);
13326 gimple_seq_add_stmt (&ilist, cond);
13327 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
13328 gimplify_assign (new_x, null_pointer_node, &ilist);
13329 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
13330 gimple_seq_add_stmt (&ilist,
13331 gimple_build_label (notnull_label));
13332 gimplify_assign (x, var, &ilist);
13333 gimple_seq_add_stmt (&ilist,
13334 gimple_build_label (opt_arg_label));
13336 else
13337 gimplify_assign (x, var, &ilist);
13338 s = size_int (0);
13339 purpose = size_int (map_idx++);
13340 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13341 gcc_checking_assert (tkind
13342 < (HOST_WIDE_INT_C (1U) << talign_shift));
13343 gcc_checking_assert (tkind
13344 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13345 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13346 build_int_cstu (tkind_type, tkind));
13347 break;
13350 gcc_assert (map_idx == map_cnt);
13352 DECL_INITIAL (TREE_VEC_ELT (t, 1))
13353 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13354 DECL_INITIAL (TREE_VEC_ELT (t, 2))
13355 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
13356 for (int i = 1; i <= 2; i++)
13357 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
13359 gimple_seq initlist = NULL;
13360 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
13361 TREE_VEC_ELT (t, i)),
13362 &initlist, true, NULL_TREE);
13363 gimple_seq_add_seq (&ilist, initlist);
13365 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
13366 gimple_seq_add_stmt (&olist,
13367 gimple_build_assign (TREE_VEC_ELT (t, i),
13368 clobber));
13370 else if (omp_maybe_offloaded_ctx (ctx->outer))
13372 tree id = get_identifier ("omp declare target");
13373 tree decl = TREE_VEC_ELT (t, i);
13374 DECL_ATTRIBUTES (decl)
13375 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13376 varpool_node *node = varpool_node::get (decl);
13377 if (node)
13379 node->offloadable = 1;
13380 if (ENABLE_OFFLOADING)
13382 g->have_offload = true;
13383 vec_safe_push (offload_vars, t);
13388 tree clobber = build_clobber (ctx->record_type);
13389 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13390 clobber));
13393 /* Once all the expansions are done, sequence all the different
13394 fragments inside gimple_omp_body. */
13396 new_body = NULL;
13398 if (offloaded
13399 && ctx->record_type)
13401 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13402 /* fixup_child_record_type might have changed receiver_decl's type. */
13403 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13404 gimple_seq_add_stmt (&new_body,
13405 gimple_build_assign (ctx->receiver_decl, t));
13407 gimple_seq_add_seq (&new_body, fplist);
13409 if (offloaded || data_region)
13411 tree prev = NULL_TREE;
13412 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13413 switch (OMP_CLAUSE_CODE (c))
13415 tree var, x;
13416 default:
13417 break;
13418 case OMP_CLAUSE_FIRSTPRIVATE:
13419 if (is_gimple_omp_oacc (ctx->stmt))
13420 break;
13421 var = OMP_CLAUSE_DECL (c);
13422 if (omp_privatize_by_reference (var)
13423 || is_gimple_reg_type (TREE_TYPE (var)))
13425 tree new_var = lookup_decl (var, ctx);
13426 tree type;
13427 type = TREE_TYPE (var);
13428 if (omp_privatize_by_reference (var))
13429 type = TREE_TYPE (type);
13430 if ((INTEGRAL_TYPE_P (type)
13431 && TYPE_PRECISION (type) <= POINTER_SIZE)
13432 || TREE_CODE (type) == POINTER_TYPE)
13434 x = build_receiver_ref (var, false, ctx);
13435 if (TREE_CODE (type) != POINTER_TYPE)
13436 x = fold_convert (pointer_sized_int_node, x);
13437 x = fold_convert (type, x);
13438 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13439 fb_rvalue);
13440 if (omp_privatize_by_reference (var))
13442 tree v = create_tmp_var_raw (type, get_name (var));
13443 gimple_add_tmp_var (v);
13444 TREE_ADDRESSABLE (v) = 1;
13445 gimple_seq_add_stmt (&new_body,
13446 gimple_build_assign (v, x));
13447 x = build_fold_addr_expr (v);
13449 gimple_seq_add_stmt (&new_body,
13450 gimple_build_assign (new_var, x));
13452 else
13454 bool by_ref = !omp_privatize_by_reference (var);
13455 x = build_receiver_ref (var, by_ref, ctx);
13456 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13457 fb_rvalue);
13458 gimple_seq_add_stmt (&new_body,
13459 gimple_build_assign (new_var, x));
13462 else if (is_variable_sized (var))
13464 tree pvar = DECL_VALUE_EXPR (var);
13465 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13466 pvar = TREE_OPERAND (pvar, 0);
13467 gcc_assert (DECL_P (pvar));
13468 tree new_var = lookup_decl (pvar, ctx);
13469 x = build_receiver_ref (var, false, ctx);
13470 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13471 gimple_seq_add_stmt (&new_body,
13472 gimple_build_assign (new_var, x));
13474 break;
13475 case OMP_CLAUSE_PRIVATE:
13476 if (is_gimple_omp_oacc (ctx->stmt))
13477 break;
13478 var = OMP_CLAUSE_DECL (c);
13479 if (omp_privatize_by_reference (var))
13481 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13482 tree new_var = lookup_decl (var, ctx);
13483 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13484 if (TREE_CONSTANT (x))
13486 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13487 get_name (var));
13488 gimple_add_tmp_var (x);
13489 TREE_ADDRESSABLE (x) = 1;
13490 x = build_fold_addr_expr_loc (clause_loc, x);
13492 else
13493 break;
13495 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13496 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13497 gimple_seq_add_stmt (&new_body,
13498 gimple_build_assign (new_var, x));
13500 break;
13501 case OMP_CLAUSE_USE_DEVICE_PTR:
13502 case OMP_CLAUSE_USE_DEVICE_ADDR:
13503 case OMP_CLAUSE_IS_DEVICE_PTR:
13504 tree new_var;
13505 gimple_seq assign_body;
13506 bool is_array_data;
13507 bool do_optional_check;
13508 assign_body = NULL;
13509 do_optional_check = false;
13510 var = OMP_CLAUSE_DECL (c);
13511 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13513 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13514 x = build_sender_ref (is_array_data
13515 ? (splay_tree_key) &DECL_NAME (var)
13516 : (splay_tree_key) &DECL_UID (var), ctx);
13517 else
13518 x = build_receiver_ref (var, false, ctx);
13520 if (is_array_data)
13522 bool is_ref = omp_privatize_by_reference (var);
13523 do_optional_check = true;
13524 /* First, we copy the descriptor data from the host; then
13525 we update its data to point to the target address. */
13526 new_var = lookup_decl (var, ctx);
13527 new_var = DECL_VALUE_EXPR (new_var);
13528 tree v = new_var;
13530 if (is_ref)
13532 var = build_fold_indirect_ref (var);
13533 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
13534 fb_rvalue);
13535 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
13536 gimple_add_tmp_var (v);
13537 TREE_ADDRESSABLE (v) = 1;
13538 gimple_seq_add_stmt (&assign_body,
13539 gimple_build_assign (v, var));
13540 tree rhs = build_fold_addr_expr (v);
13541 gimple_seq_add_stmt (&assign_body,
13542 gimple_build_assign (new_var, rhs));
13544 else
13545 gimple_seq_add_stmt (&assign_body,
13546 gimple_build_assign (new_var, var));
13548 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13549 gcc_assert (v2);
13550 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13551 gimple_seq_add_stmt (&assign_body,
13552 gimple_build_assign (v2, x));
13554 else if (is_variable_sized (var))
13556 tree pvar = DECL_VALUE_EXPR (var);
13557 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13558 pvar = TREE_OPERAND (pvar, 0);
13559 gcc_assert (DECL_P (pvar));
13560 new_var = lookup_decl (pvar, ctx);
13561 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13562 gimple_seq_add_stmt (&assign_body,
13563 gimple_build_assign (new_var, x));
13565 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13566 && !omp_privatize_by_reference (var)
13567 && !omp_is_allocatable_or_ptr (var))
13568 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13570 new_var = lookup_decl (var, ctx);
13571 new_var = DECL_VALUE_EXPR (new_var);
13572 gcc_assert (TREE_CODE (new_var) == MEM_REF);
13573 new_var = TREE_OPERAND (new_var, 0);
13574 gcc_assert (DECL_P (new_var));
13575 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13576 gimple_seq_add_stmt (&assign_body,
13577 gimple_build_assign (new_var, x));
13579 else
13581 tree type = TREE_TYPE (var);
13582 new_var = lookup_decl (var, ctx);
13583 if (omp_privatize_by_reference (var))
13585 type = TREE_TYPE (type);
13586 if (POINTER_TYPE_P (type)
13587 && TREE_CODE (type) != ARRAY_TYPE
13588 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13589 || (omp_privatize_by_reference (var)
13590 && omp_is_allocatable_or_ptr (var))))
13592 tree v = create_tmp_var_raw (type, get_name (var));
13593 gimple_add_tmp_var (v);
13594 TREE_ADDRESSABLE (v) = 1;
13595 x = fold_convert (type, x);
13596 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13597 fb_rvalue);
13598 gimple_seq_add_stmt (&assign_body,
13599 gimple_build_assign (v, x));
13600 x = build_fold_addr_expr (v);
13601 do_optional_check = true;
13604 new_var = DECL_VALUE_EXPR (new_var);
13605 x = fold_convert (TREE_TYPE (new_var), x);
13606 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13607 gimple_seq_add_stmt (&assign_body,
13608 gimple_build_assign (new_var, x));
13610 tree present;
13611 present = (do_optional_check
13612 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
13613 : NULL_TREE);
13614 if (present)
13616 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13617 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13618 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13619 glabel *null_glabel = gimple_build_label (null_label);
13620 glabel *notnull_glabel = gimple_build_label (notnull_label);
13621 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
13622 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13623 fb_rvalue);
13624 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13625 fb_rvalue);
13626 gcond *cond = gimple_build_cond_from_tree (present,
13627 notnull_label,
13628 null_label);
13629 gimple_seq_add_stmt (&new_body, cond);
13630 gimple_seq_add_stmt (&new_body, null_glabel);
13631 gimplify_assign (new_var, null_pointer_node, &new_body);
13632 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13633 gimple_seq_add_stmt (&new_body, notnull_glabel);
13634 gimple_seq_add_seq (&new_body, assign_body);
13635 gimple_seq_add_stmt (&new_body,
13636 gimple_build_label (opt_arg_label));
13638 else
13639 gimple_seq_add_seq (&new_body, assign_body);
13640 break;
13642 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13643 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13644 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13645 or references to VLAs. */
13646 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13647 switch (OMP_CLAUSE_CODE (c))
13649 tree var;
13650 default:
13651 break;
13652 case OMP_CLAUSE_MAP:
13653 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13654 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13656 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13657 poly_int64 offset = 0;
13658 gcc_assert (prev);
13659 var = OMP_CLAUSE_DECL (c);
13660 if (DECL_P (var)
13661 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
13662 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
13663 ctx))
13664 && varpool_node::get_create (var)->offloadable)
13665 break;
13666 if (TREE_CODE (var) == INDIRECT_REF
13667 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
13668 var = TREE_OPERAND (var, 0);
13669 if (TREE_CODE (var) == COMPONENT_REF)
13671 var = get_addr_base_and_unit_offset (var, &offset);
13672 gcc_assert (var != NULL_TREE && DECL_P (var));
13674 else if (DECL_SIZE (var)
13675 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
13677 tree var2 = DECL_VALUE_EXPR (var);
13678 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
13679 var2 = TREE_OPERAND (var2, 0);
13680 gcc_assert (DECL_P (var2));
13681 var = var2;
13683 tree new_var = lookup_decl (var, ctx), x;
13684 tree type = TREE_TYPE (new_var);
13685 bool is_ref;
13686 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
13687 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13688 == COMPONENT_REF))
13690 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
13691 is_ref = true;
13692 new_var = build2 (MEM_REF, type,
13693 build_fold_addr_expr (new_var),
13694 build_int_cst (build_pointer_type (type),
13695 offset));
13697 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
13699 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
13700 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
13701 new_var = build2 (MEM_REF, type,
13702 build_fold_addr_expr (new_var),
13703 build_int_cst (build_pointer_type (type),
13704 offset));
13706 else
13707 is_ref = omp_privatize_by_reference (var);
13708 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13709 is_ref = false;
13710 bool ref_to_array = false;
13711 if (is_ref)
13713 type = TREE_TYPE (type);
13714 if (TREE_CODE (type) == ARRAY_TYPE)
13716 type = build_pointer_type (type);
13717 ref_to_array = true;
13720 else if (TREE_CODE (type) == ARRAY_TYPE)
13722 tree decl2 = DECL_VALUE_EXPR (new_var);
13723 gcc_assert (TREE_CODE (decl2) == MEM_REF);
13724 decl2 = TREE_OPERAND (decl2, 0);
13725 gcc_assert (DECL_P (decl2));
13726 new_var = decl2;
13727 type = TREE_TYPE (new_var);
13729 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
13730 x = fold_convert_loc (clause_loc, type, x);
13731 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
13733 tree bias = OMP_CLAUSE_SIZE (c);
13734 if (DECL_P (bias))
13735 bias = lookup_decl (bias, ctx);
13736 bias = fold_convert_loc (clause_loc, sizetype, bias);
13737 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
13738 bias);
13739 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
13740 TREE_TYPE (x), x, bias);
13742 if (ref_to_array)
13743 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13744 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13745 if (is_ref && !ref_to_array)
13747 tree t = create_tmp_var_raw (type, get_name (var));
13748 gimple_add_tmp_var (t);
13749 TREE_ADDRESSABLE (t) = 1;
13750 gimple_seq_add_stmt (&new_body,
13751 gimple_build_assign (t, x));
13752 x = build_fold_addr_expr_loc (clause_loc, t);
13754 gimple_seq_add_stmt (&new_body,
13755 gimple_build_assign (new_var, x));
13756 prev = NULL_TREE;
13758 else if (OMP_CLAUSE_CHAIN (c)
13759 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
13760 == OMP_CLAUSE_MAP
13761 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13762 == GOMP_MAP_FIRSTPRIVATE_POINTER
13763 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13764 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13765 prev = c;
13766 break;
13767 case OMP_CLAUSE_PRIVATE:
13768 var = OMP_CLAUSE_DECL (c);
13769 if (is_variable_sized (var))
13771 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13772 tree new_var = lookup_decl (var, ctx);
13773 tree pvar = DECL_VALUE_EXPR (var);
13774 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13775 pvar = TREE_OPERAND (pvar, 0);
13776 gcc_assert (DECL_P (pvar));
13777 tree new_pvar = lookup_decl (pvar, ctx);
13778 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13779 tree al = size_int (DECL_ALIGN (var));
13780 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
13781 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13782 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
13783 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13784 gimple_seq_add_stmt (&new_body,
13785 gimple_build_assign (new_pvar, x));
13787 else if (omp_privatize_by_reference (var)
13788 && !is_gimple_omp_oacc (ctx->stmt))
13790 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13791 tree new_var = lookup_decl (var, ctx);
13792 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13793 if (TREE_CONSTANT (x))
13794 break;
13795 else
13797 tree atmp
13798 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13799 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
13800 tree al = size_int (TYPE_ALIGN (rtype));
13801 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13804 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13805 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13806 gimple_seq_add_stmt (&new_body,
13807 gimple_build_assign (new_var, x));
13809 break;
13812 gimple_seq fork_seq = NULL;
13813 gimple_seq join_seq = NULL;
13815 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
13817 /* If there are reductions on the offloaded region itself, treat
13818 them as a dummy GANG loop. */
13819 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
13821 gcall *private_marker = lower_oacc_private_marker (ctx);
13823 if (private_marker)
13824 gimple_call_set_arg (private_marker, 2, level);
13826 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
13827 false, NULL, private_marker, NULL, &fork_seq,
13828 &join_seq, ctx);
13831 gimple_seq_add_seq (&new_body, fork_seq);
13832 gimple_seq_add_seq (&new_body, tgt_body);
13833 gimple_seq_add_seq (&new_body, join_seq);
13835 if (offloaded)
13837 new_body = maybe_catch_exception (new_body);
13838 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
13840 gimple_omp_set_body (stmt, new_body);
13843 bind = gimple_build_bind (NULL, NULL,
13844 tgt_bind ? gimple_bind_block (tgt_bind)
13845 : NULL_TREE);
13846 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
13847 gimple_bind_add_seq (bind, ilist);
13848 gimple_bind_add_stmt (bind, stmt);
13849 gimple_bind_add_seq (bind, olist);
13851 pop_gimplify_context (NULL);
13853 if (dep_bind)
13855 gimple_bind_add_seq (dep_bind, dep_ilist);
13856 gimple_bind_add_stmt (dep_bind, bind);
13857 gimple_bind_add_seq (dep_bind, dep_olist);
13858 pop_gimplify_context (dep_bind);
13862 /* Expand code for an OpenMP teams directive. */
13864 static void
13865 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
13867 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
13868 push_gimplify_context ();
13870 tree block = make_node (BLOCK);
13871 gbind *bind = gimple_build_bind (NULL, NULL, block);
13872 gsi_replace (gsi_p, bind, true);
13873 gimple_seq bind_body = NULL;
13874 gimple_seq dlist = NULL;
13875 gimple_seq olist = NULL;
13877 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13878 OMP_CLAUSE_NUM_TEAMS);
13879 if (num_teams == NULL_TREE)
13880 num_teams = build_int_cst (unsigned_type_node, 0);
13881 else
13883 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
13884 num_teams = fold_convert (unsigned_type_node, num_teams);
13885 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
13887 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13888 OMP_CLAUSE_THREAD_LIMIT);
13889 if (thread_limit == NULL_TREE)
13890 thread_limit = build_int_cst (unsigned_type_node, 0);
13891 else
13893 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
13894 thread_limit = fold_convert (unsigned_type_node, thread_limit);
13895 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
13896 fb_rvalue);
13899 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
13900 &bind_body, &dlist, ctx, NULL);
13901 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
13902 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
13903 NULL, ctx);
13904 gimple_seq_add_stmt (&bind_body, teams_stmt);
13906 location_t loc = gimple_location (teams_stmt);
13907 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
13908 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
13909 gimple_set_location (call, loc);
13910 gimple_seq_add_stmt (&bind_body, call);
13912 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
13913 gimple_omp_set_body (teams_stmt, NULL);
13914 gimple_seq_add_seq (&bind_body, olist);
13915 gimple_seq_add_seq (&bind_body, dlist);
13916 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
13917 gimple_bind_set_body (bind, bind_body);
13919 pop_gimplify_context (bind);
13921 gimple_bind_append_vars (bind, ctx->block_vars);
13922 BLOCK_VARS (block) = ctx->block_vars;
13923 if (BLOCK_VARS (block))
13924 TREE_USED (block) = 1;
13927 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
13928 regimplified. If DATA is non-NULL, lower_omp_1 is outside
13929 of OMP context, but with task_shared_vars set. */
13931 static tree
13932 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
13933 void *data)
13935 tree t = *tp;
13937 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
13938 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
13939 && data == NULL
13940 && DECL_HAS_VALUE_EXPR_P (t))
13941 return t;
13943 if (task_shared_vars
13944 && DECL_P (t)
13945 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
13946 return t;
13948 /* If a global variable has been privatized, TREE_CONSTANT on
13949 ADDR_EXPR might be wrong. */
13950 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
13951 recompute_tree_invariant_for_addr_expr (t);
13953 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
13954 return NULL_TREE;
13957 /* Data to be communicated between lower_omp_regimplify_operands and
13958 lower_omp_regimplify_operands_p. */
13960 struct lower_omp_regimplify_operands_data
13962 omp_context *ctx;
13963 vec<tree> *decls;
13966 /* Helper function for lower_omp_regimplify_operands. Find
13967 omp_member_access_dummy_var vars and adjust temporarily their
13968 DECL_VALUE_EXPRs if needed. */
13970 static tree
13971 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
13972 void *data)
13974 tree t = omp_member_access_dummy_var (*tp);
13975 if (t)
13977 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
13978 lower_omp_regimplify_operands_data *ldata
13979 = (lower_omp_regimplify_operands_data *) wi->info;
13980 tree o = maybe_lookup_decl (t, ldata->ctx);
13981 if (o != t)
13983 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
13984 ldata->decls->safe_push (*tp);
13985 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
13986 SET_DECL_VALUE_EXPR (*tp, v);
13989 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
13990 return NULL_TREE;
13993 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
13994 of omp_member_access_dummy_var vars during regimplification. */
13996 static void
13997 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
13998 gimple_stmt_iterator *gsi_p)
14000 auto_vec<tree, 10> decls;
14001 if (ctx)
14003 struct walk_stmt_info wi;
14004 memset (&wi, '\0', sizeof (wi));
14005 struct lower_omp_regimplify_operands_data data;
14006 data.ctx = ctx;
14007 data.decls = &decls;
14008 wi.info = &data;
14009 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
14011 gimple_regimplify_operands (stmt, gsi_p);
14012 while (!decls.is_empty ())
14014 tree t = decls.pop ();
14015 tree v = decls.pop ();
14016 SET_DECL_VALUE_EXPR (t, v);
14020 static void
14021 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14023 gimple *stmt = gsi_stmt (*gsi_p);
14024 struct walk_stmt_info wi;
14025 gcall *call_stmt;
14027 if (gimple_has_location (stmt))
14028 input_location = gimple_location (stmt);
14030 if (task_shared_vars)
14031 memset (&wi, '\0', sizeof (wi));
14033 /* If we have issued syntax errors, avoid doing any heavy lifting.
14034 Just replace the OMP directives with a NOP to avoid
14035 confusing RTL expansion. */
14036 if (seen_error () && is_gimple_omp (stmt))
14038 gsi_replace (gsi_p, gimple_build_nop (), true);
14039 return;
14042 switch (gimple_code (stmt))
14044 case GIMPLE_COND:
14046 gcond *cond_stmt = as_a <gcond *> (stmt);
14047 if ((ctx || task_shared_vars)
14048 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
14049 lower_omp_regimplify_p,
14050 ctx ? NULL : &wi, NULL)
14051 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
14052 lower_omp_regimplify_p,
14053 ctx ? NULL : &wi, NULL)))
14054 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
14056 break;
14057 case GIMPLE_CATCH:
14058 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
14059 break;
14060 case GIMPLE_EH_FILTER:
14061 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
14062 break;
14063 case GIMPLE_TRY:
14064 lower_omp (gimple_try_eval_ptr (stmt), ctx);
14065 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
14066 break;
14067 case GIMPLE_TRANSACTION:
14068 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
14069 ctx);
14070 break;
14071 case GIMPLE_BIND:
14072 if (ctx && is_gimple_omp_oacc (ctx->stmt))
14074 tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
14075 oacc_privatization_scan_decl_chain (ctx, vars);
14077 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
14078 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
14079 break;
14080 case GIMPLE_OMP_PARALLEL:
14081 case GIMPLE_OMP_TASK:
14082 ctx = maybe_lookup_ctx (stmt);
14083 gcc_assert (ctx);
14084 if (ctx->cancellable)
14085 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14086 lower_omp_taskreg (gsi_p, ctx);
14087 break;
14088 case GIMPLE_OMP_FOR:
14089 ctx = maybe_lookup_ctx (stmt);
14090 gcc_assert (ctx);
14091 if (ctx->cancellable)
14092 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14093 lower_omp_for (gsi_p, ctx);
14094 break;
14095 case GIMPLE_OMP_SECTIONS:
14096 ctx = maybe_lookup_ctx (stmt);
14097 gcc_assert (ctx);
14098 if (ctx->cancellable)
14099 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14100 lower_omp_sections (gsi_p, ctx);
14101 break;
14102 case GIMPLE_OMP_SCOPE:
14103 ctx = maybe_lookup_ctx (stmt);
14104 gcc_assert (ctx);
14105 lower_omp_scope (gsi_p, ctx);
14106 break;
14107 case GIMPLE_OMP_SINGLE:
14108 ctx = maybe_lookup_ctx (stmt);
14109 gcc_assert (ctx);
14110 lower_omp_single (gsi_p, ctx);
14111 break;
14112 case GIMPLE_OMP_MASTER:
14113 case GIMPLE_OMP_MASKED:
14114 ctx = maybe_lookup_ctx (stmt);
14115 gcc_assert (ctx);
14116 lower_omp_master (gsi_p, ctx);
14117 break;
14118 case GIMPLE_OMP_TASKGROUP:
14119 ctx = maybe_lookup_ctx (stmt);
14120 gcc_assert (ctx);
14121 lower_omp_taskgroup (gsi_p, ctx);
14122 break;
14123 case GIMPLE_OMP_ORDERED:
14124 ctx = maybe_lookup_ctx (stmt);
14125 gcc_assert (ctx);
14126 lower_omp_ordered (gsi_p, ctx);
14127 break;
14128 case GIMPLE_OMP_SCAN:
14129 ctx = maybe_lookup_ctx (stmt);
14130 gcc_assert (ctx);
14131 lower_omp_scan (gsi_p, ctx);
14132 break;
14133 case GIMPLE_OMP_CRITICAL:
14134 ctx = maybe_lookup_ctx (stmt);
14135 gcc_assert (ctx);
14136 lower_omp_critical (gsi_p, ctx);
14137 break;
14138 case GIMPLE_OMP_ATOMIC_LOAD:
14139 if ((ctx || task_shared_vars)
14140 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14141 as_a <gomp_atomic_load *> (stmt)),
14142 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
14143 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14144 break;
14145 case GIMPLE_OMP_TARGET:
14146 ctx = maybe_lookup_ctx (stmt);
14147 gcc_assert (ctx);
14148 lower_omp_target (gsi_p, ctx);
14149 break;
14150 case GIMPLE_OMP_TEAMS:
14151 ctx = maybe_lookup_ctx (stmt);
14152 gcc_assert (ctx);
14153 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
14154 lower_omp_taskreg (gsi_p, ctx);
14155 else
14156 lower_omp_teams (gsi_p, ctx);
14157 break;
14158 case GIMPLE_CALL:
14159 tree fndecl;
14160 call_stmt = as_a <gcall *> (stmt);
14161 fndecl = gimple_call_fndecl (call_stmt);
14162 if (fndecl
14163 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14164 switch (DECL_FUNCTION_CODE (fndecl))
14166 case BUILT_IN_GOMP_BARRIER:
14167 if (ctx == NULL)
14168 break;
14169 /* FALLTHRU */
14170 case BUILT_IN_GOMP_CANCEL:
14171 case BUILT_IN_GOMP_CANCELLATION_POINT:
14172 omp_context *cctx;
14173 cctx = ctx;
14174 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
14175 cctx = cctx->outer;
14176 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
14177 if (!cctx->cancellable)
14179 if (DECL_FUNCTION_CODE (fndecl)
14180 == BUILT_IN_GOMP_CANCELLATION_POINT)
14182 stmt = gimple_build_nop ();
14183 gsi_replace (gsi_p, stmt, false);
14185 break;
14187 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
14189 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
14190 gimple_call_set_fndecl (call_stmt, fndecl);
14191 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
14193 tree lhs;
14194 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
14195 gimple_call_set_lhs (call_stmt, lhs);
14196 tree fallthru_label;
14197 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
14198 gimple *g;
14199 g = gimple_build_label (fallthru_label);
14200 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14201 g = gimple_build_cond (NE_EXPR, lhs,
14202 fold_convert (TREE_TYPE (lhs),
14203 boolean_false_node),
14204 cctx->cancel_label, fallthru_label);
14205 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14206 break;
14207 default:
14208 break;
14210 goto regimplify;
14212 case GIMPLE_ASSIGN:
14213 for (omp_context *up = ctx; up; up = up->outer)
14215 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
14216 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
14217 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
14218 || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
14219 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
14220 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
14221 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
14222 && (gimple_omp_target_kind (up->stmt)
14223 == GF_OMP_TARGET_KIND_DATA)))
14224 continue;
14225 else if (!up->lastprivate_conditional_map)
14226 break;
14227 tree lhs = get_base_address (gimple_assign_lhs (stmt));
14228 if (TREE_CODE (lhs) == MEM_REF
14229 && DECL_P (TREE_OPERAND (lhs, 0))
14230 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
14231 0))) == REFERENCE_TYPE)
14232 lhs = TREE_OPERAND (lhs, 0);
14233 if (DECL_P (lhs))
14234 if (tree *v = up->lastprivate_conditional_map->get (lhs))
14236 tree clauses;
14237 if (up->combined_into_simd_safelen1)
14239 up = up->outer;
14240 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
14241 up = up->outer;
14243 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
14244 clauses = gimple_omp_for_clauses (up->stmt);
14245 else
14246 clauses = gimple_omp_sections_clauses (up->stmt);
14247 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
14248 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
14249 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14250 OMP_CLAUSE__CONDTEMP_);
14251 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
14252 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
14253 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14256 /* FALLTHRU */
14258 default:
14259 regimplify:
14260 if ((ctx || task_shared_vars)
14261 && walk_gimple_op (stmt, lower_omp_regimplify_p,
14262 ctx ? NULL : &wi))
14264 /* Just remove clobbers, this should happen only if we have
14265 "privatized" local addressable variables in SIMD regions,
14266 the clobber isn't needed in that case and gimplifying address
14267 of the ARRAY_REF into a pointer and creating MEM_REF based
14268 clobber would create worse code than we get with the clobber
14269 dropped. */
14270 if (gimple_clobber_p (stmt))
14272 gsi_replace (gsi_p, gimple_build_nop (), true);
14273 break;
14275 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14277 break;
14281 static void
14282 lower_omp (gimple_seq *body, omp_context *ctx)
14284 location_t saved_location = input_location;
14285 gimple_stmt_iterator gsi;
14286 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14287 lower_omp_1 (&gsi, ctx);
14288 /* During gimplification, we haven't folded statments inside offloading
14289 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
14290 if (target_nesting_level || taskreg_nesting_level)
14291 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14292 fold_stmt (&gsi);
14293 input_location = saved_location;
14296 /* Main entry point. */
14298 static unsigned int
14299 execute_lower_omp (void)
14301 gimple_seq body;
14302 int i;
14303 omp_context *ctx;
14305 /* This pass always runs, to provide PROP_gimple_lomp.
14306 But often, there is nothing to do. */
14307 if (flag_openacc == 0 && flag_openmp == 0
14308 && flag_openmp_simd == 0)
14309 return 0;
14311 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
14312 delete_omp_context);
14314 body = gimple_body (current_function_decl);
14316 scan_omp (&body, NULL);
14317 gcc_assert (taskreg_nesting_level == 0);
14318 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
14319 finish_taskreg_scan (ctx);
14320 taskreg_contexts.release ();
14322 if (all_contexts->root)
14324 if (task_shared_vars)
14325 push_gimplify_context ();
14326 lower_omp (&body, NULL);
14327 if (task_shared_vars)
14328 pop_gimplify_context (NULL);
14331 if (all_contexts)
14333 splay_tree_delete (all_contexts);
14334 all_contexts = NULL;
14336 BITMAP_FREE (task_shared_vars);
14337 BITMAP_FREE (global_nonaddressable_vars);
14339 /* If current function is a method, remove artificial dummy VAR_DECL created
14340 for non-static data member privatization, they aren't needed for
14341 debuginfo nor anything else, have been already replaced everywhere in the
14342 IL and cause problems with LTO. */
14343 if (DECL_ARGUMENTS (current_function_decl)
14344 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
14345 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
14346 == POINTER_TYPE))
14347 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
14348 return 0;
14351 namespace {
14353 const pass_data pass_data_lower_omp =
14355 GIMPLE_PASS, /* type */
14356 "omplower", /* name */
14357 OPTGROUP_OMP, /* optinfo_flags */
14358 TV_NONE, /* tv_id */
14359 PROP_gimple_any, /* properties_required */
14360 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
14361 0, /* properties_destroyed */
14362 0, /* todo_flags_start */
14363 0, /* todo_flags_finish */
14366 class pass_lower_omp : public gimple_opt_pass
14368 public:
14369 pass_lower_omp (gcc::context *ctxt)
14370 : gimple_opt_pass (pass_data_lower_omp, ctxt)
14373 /* opt_pass methods: */
14374 virtual unsigned int execute (function *) { return execute_lower_omp (); }
14376 }; // class pass_lower_omp
14378 } // anon namespace
14380 gimple_opt_pass *
14381 make_pass_lower_omp (gcc::context *ctxt)
14383 return new pass_lower_omp (ctxt);
14386 /* The following is a utility to diagnose structured block violations.
14387 It is not part of the "omplower" pass, as that's invoked too late. It
14388 should be invoked by the respective front ends after gimplification. */
14390 static splay_tree all_labels;
14392 /* Check for mismatched contexts and generate an error if needed. Return
14393 true if an error is detected. */
14395 static bool
14396 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
14397 gimple *branch_ctx, gimple *label_ctx)
14399 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
14400 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
14402 if (label_ctx == branch_ctx)
14403 return false;
14405 const char* kind = NULL;
14407 if (flag_openacc)
14409 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
14410 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
14412 gcc_checking_assert (kind == NULL);
14413 kind = "OpenACC";
14416 if (kind == NULL)
14418 gcc_checking_assert (flag_openmp || flag_openmp_simd);
14419 kind = "OpenMP";
14422 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14423 so we could traverse it and issue a correct "exit" or "enter" error
14424 message upon a structured block violation.
14426 We built the context by building a list with tree_cons'ing, but there is
14427 no easy counterpart in gimple tuples. It seems like far too much work
14428 for issuing exit/enter error messages. If someone really misses the
14429 distinct error message... patches welcome. */
14431 #if 0
14432 /* Try to avoid confusing the user by producing and error message
14433 with correct "exit" or "enter" verbiage. We prefer "exit"
14434 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14435 if (branch_ctx == NULL)
14436 exit_p = false;
14437 else
14439 while (label_ctx)
14441 if (TREE_VALUE (label_ctx) == branch_ctx)
14443 exit_p = false;
14444 break;
14446 label_ctx = TREE_CHAIN (label_ctx);
14450 if (exit_p)
14451 error ("invalid exit from %s structured block", kind);
14452 else
14453 error ("invalid entry to %s structured block", kind);
14454 #endif
14456 /* If it's obvious we have an invalid entry, be specific about the error. */
14457 if (branch_ctx == NULL)
14458 error ("invalid entry to %s structured block", kind);
14459 else
14461 /* Otherwise, be vague and lazy, but efficient. */
14462 error ("invalid branch to/from %s structured block", kind);
14465 gsi_replace (gsi_p, gimple_build_nop (), false);
14466 return true;
14469 /* Pass 1: Create a minimal tree of structured blocks, and record
14470 where each label is found. */
14472 static tree
14473 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14474 struct walk_stmt_info *wi)
14476 gimple *context = (gimple *) wi->info;
14477 gimple *inner_context;
14478 gimple *stmt = gsi_stmt (*gsi_p);
14480 *handled_ops_p = true;
14482 switch (gimple_code (stmt))
14484 WALK_SUBSTMTS;
14486 case GIMPLE_OMP_PARALLEL:
14487 case GIMPLE_OMP_TASK:
14488 case GIMPLE_OMP_SCOPE:
14489 case GIMPLE_OMP_SECTIONS:
14490 case GIMPLE_OMP_SINGLE:
14491 case GIMPLE_OMP_SECTION:
14492 case GIMPLE_OMP_MASTER:
14493 case GIMPLE_OMP_MASKED:
14494 case GIMPLE_OMP_ORDERED:
14495 case GIMPLE_OMP_SCAN:
14496 case GIMPLE_OMP_CRITICAL:
14497 case GIMPLE_OMP_TARGET:
14498 case GIMPLE_OMP_TEAMS:
14499 case GIMPLE_OMP_TASKGROUP:
14500 /* The minimal context here is just the current OMP construct. */
14501 inner_context = stmt;
14502 wi->info = inner_context;
14503 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14504 wi->info = context;
14505 break;
14507 case GIMPLE_OMP_FOR:
14508 inner_context = stmt;
14509 wi->info = inner_context;
14510 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14511 walk them. */
14512 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
14513 diagnose_sb_1, NULL, wi);
14514 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14515 wi->info = context;
14516 break;
14518 case GIMPLE_LABEL:
14519 splay_tree_insert (all_labels,
14520 (splay_tree_key) gimple_label_label (
14521 as_a <glabel *> (stmt)),
14522 (splay_tree_value) context);
14523 break;
14525 default:
14526 break;
14529 return NULL_TREE;
14532 /* Pass 2: Check each branch and see if its context differs from that of
14533 the destination label's context. */
14535 static tree
14536 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14537 struct walk_stmt_info *wi)
14539 gimple *context = (gimple *) wi->info;
14540 splay_tree_node n;
14541 gimple *stmt = gsi_stmt (*gsi_p);
14543 *handled_ops_p = true;
14545 switch (gimple_code (stmt))
14547 WALK_SUBSTMTS;
14549 case GIMPLE_OMP_PARALLEL:
14550 case GIMPLE_OMP_TASK:
14551 case GIMPLE_OMP_SCOPE:
14552 case GIMPLE_OMP_SECTIONS:
14553 case GIMPLE_OMP_SINGLE:
14554 case GIMPLE_OMP_SECTION:
14555 case GIMPLE_OMP_MASTER:
14556 case GIMPLE_OMP_MASKED:
14557 case GIMPLE_OMP_ORDERED:
14558 case GIMPLE_OMP_SCAN:
14559 case GIMPLE_OMP_CRITICAL:
14560 case GIMPLE_OMP_TARGET:
14561 case GIMPLE_OMP_TEAMS:
14562 case GIMPLE_OMP_TASKGROUP:
14563 wi->info = stmt;
14564 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14565 wi->info = context;
14566 break;
14568 case GIMPLE_OMP_FOR:
14569 wi->info = stmt;
14570 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14571 walk them. */
14572 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
14573 diagnose_sb_2, NULL, wi);
14574 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14575 wi->info = context;
14576 break;
14578 case GIMPLE_COND:
14580 gcond *cond_stmt = as_a <gcond *> (stmt);
14581 tree lab = gimple_cond_true_label (cond_stmt);
14582 if (lab)
14584 n = splay_tree_lookup (all_labels,
14585 (splay_tree_key) lab);
14586 diagnose_sb_0 (gsi_p, context,
14587 n ? (gimple *) n->value : NULL);
14589 lab = gimple_cond_false_label (cond_stmt);
14590 if (lab)
14592 n = splay_tree_lookup (all_labels,
14593 (splay_tree_key) lab);
14594 diagnose_sb_0 (gsi_p, context,
14595 n ? (gimple *) n->value : NULL);
14598 break;
14600 case GIMPLE_GOTO:
14602 tree lab = gimple_goto_dest (stmt);
14603 if (TREE_CODE (lab) != LABEL_DECL)
14604 break;
14606 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14607 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
14609 break;
14611 case GIMPLE_SWITCH:
14613 gswitch *switch_stmt = as_a <gswitch *> (stmt);
14614 unsigned int i;
14615 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
14617 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
14618 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14619 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
14620 break;
14623 break;
14625 case GIMPLE_RETURN:
14626 diagnose_sb_0 (gsi_p, context, NULL);
14627 break;
14629 default:
14630 break;
14633 return NULL_TREE;
14636 static unsigned int
14637 diagnose_omp_structured_block_errors (void)
14639 struct walk_stmt_info wi;
14640 gimple_seq body = gimple_body (current_function_decl);
14642 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
14644 memset (&wi, 0, sizeof (wi));
14645 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
14647 memset (&wi, 0, sizeof (wi));
14648 wi.want_locations = true;
14649 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
14651 gimple_set_body (current_function_decl, body);
14653 splay_tree_delete (all_labels);
14654 all_labels = NULL;
14656 return 0;
14659 namespace {
14661 const pass_data pass_data_diagnose_omp_blocks =
14663 GIMPLE_PASS, /* type */
14664 "*diagnose_omp_blocks", /* name */
14665 OPTGROUP_OMP, /* optinfo_flags */
14666 TV_NONE, /* tv_id */
14667 PROP_gimple_any, /* properties_required */
14668 0, /* properties_provided */
14669 0, /* properties_destroyed */
14670 0, /* todo_flags_start */
14671 0, /* todo_flags_finish */
14674 class pass_diagnose_omp_blocks : public gimple_opt_pass
14676 public:
14677 pass_diagnose_omp_blocks (gcc::context *ctxt)
14678 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
14681 /* opt_pass methods: */
14682 virtual bool gate (function *)
14684 return flag_openacc || flag_openmp || flag_openmp_simd;
14686 virtual unsigned int execute (function *)
14688 return diagnose_omp_structured_block_errors ();
14691 }; // class pass_diagnose_omp_blocks
14693 } // anon namespace
14695 gimple_opt_pass *
14696 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
14698 return new pass_diagnose_omp_blocks (ctxt);
14702 #include "gt-omp-low.h"