/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2021 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "stringpool.h"
#include "attribs.h"
#include "omp-offload.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
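
/* As a concrete (simplified) example of the two phases, given roughly

     void f (void)
     {
       int x = 0;
     #pragma omp parallel shared(x)
       x++;
     }

   the parallel body is outlined into a child function along the lines of

     void f._omp_fn.0 (struct .omp_data_s *.omp_data_i)
     {
       (*.omp_data_i->x)++;
     }

   and the construct itself becomes a GOMP_parallel call passing the
   address of a stack-allocated 'struct .omp_data_s { int *x; }'.  The
   exact GIMPLE produced differs in detail; this is only a sketch.  */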
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is a nested scan context with an inclusive clause.  */
  bool scan_inclusive;

  /* True if there is a nested scan context with an exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is an order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is a bind clause on the construct (i.e. a loop
     construct).  */
  bool loop_p;

  /* Only used for omp target contexts.  True if a teams construct is
     strictly nested in it.  */
  bool teams_nested_p;

  /* Only used for omp target contexts.  True if an OpenMP construct other
     than teams is strictly nested in it.  */
  bool nonteams_nested_p;

  /* Candidates for adjusting OpenACC privatization level.  */
  vec<tree> oacc_privatization_candidates;
};
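
/* For instance, for the 'parallel shared(x)' sketch above, field_map
   maps the VAR_DECL 'x' to the FIELD_DECL 'x' of record_type
   (struct .omp_data_s); sender_decl is the parent's local '.omp_data_o'
   whose fields are filled in before the GOMP_parallel call, and
   receiver_decl is the child's '.omp_data_i' parameter through which
   those fields are read back.  (Simplified summary of how these fields
   are used.)  */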

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static bitmap global_nonaddressable_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS \
  case GIMPLE_BIND: \
  case GIMPLE_TRY: \
  case GIMPLE_CATCH: \
  case GIMPLE_EH_FILTER: \
  case GIMPLE_TRANSACTION: \
    /* The sub-statements for these should be walked.  */ \
    *handled_ops_p = false; \
    break;
/* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_parallel_or_serial (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_SERIAL)));
}
/* Return whether CTX represents an OpenACC 'kernels' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* Return whether CTX represents an OpenACC 'kernels' decomposed part.  */

static bool
is_oacc_kernels_decomposed_part (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
}
/* Return true if STMT corresponds to an OpenMP target region.  */
static bool
is_omp_target (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
    {
      int kind = gimple_omp_target_kind (stmt);
      return (kind == GF_OMP_TARGET_KIND_REGION
	      || kind == GF_OMP_TARGET_KIND_DATA
	      || kind == GF_OMP_TARGET_KIND_ENTER_DATA
	      || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
    }
  return false;
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
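
/* E.g. for a C++ member function (simplified illustration)

     struct S {
       int m;
       void f () {
     #pragma omp parallel private(m)
	 m++;
       }
     };

   the front end refers to 'm' through an artificial VAR_DECL whose
   DECL_VALUE_EXPR is 'this->m'; for that dummy decl the function above
   walks the COMPONENT_REF down to, and returns, the artificial 'this'
   PARM_DECL.  */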

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
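
/* Used e.g. by build_outer_var_ref below to rewrite a DECL_VALUE_EXPR
   such as 'this->m', substituting for the dummy var's underlying 'this'
   parameter its remapped counterpart from an outer context.  */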

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
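
/* E.g. for 'int n = ...; int vla[n];' the unit size of the type of
   'vla' is not a compile-time constant, so is_variable_sized (vla) is
   true, while it is false for any fixed-size array or scalar.  */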

/* Lookup variables.  The "maybe" forms allow the variable not to have
   been entered and return NULL_TREE in that case; the plain forms
   assert that the variable has been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
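
/* For example, for

     int a = 1;
     #pragma omp parallel shared(a)
     ...

   a non-addressable scalar 'a' can use copy-in/copy-out, so the
   function above returns false and 'a' gets a by-value field in the
   communication record; if 'a' had its address taken, or for a task
   region (which may outlive the spawning code), the field instead
   holds a pointer to 'a'.  (Illustrative summary of the logic above.)  */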

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable only because a task
     needs to take its address.  But we don't need to take the address
     of privatizations from that var.  */
  if (TREE_ADDRESSABLE (var)
      && ((task_shared_vars
	   && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
	  || (global_nonaddressable_vars
	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */
/* See also 'gcc/omp-oacc-neuter-broadcast.cc:oacc_build_component_ref'.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
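
/* E.g. for a by-value field this builds '(*.omp_data_i).a' and for a
   by-reference field '*(*.omp_data_i).a', where '.omp_data_i' is
   CTX->receiver_decl.  (Sketch; the names follow GCC's convention for
   the receiver parameter.)  */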

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  for (; outer; outer = outer->outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
	continue;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
	  && !maybe_lookup_decl (var, outer))
	continue;
      break;
    }

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & (32 | 3)) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
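
/* Summary of the MASK bits as used by the function above (derived from
   the code, not a normative list):
     1  - install the field into record_type / field_map.
     2  - install the field into srecord_type / sfield_map.
     4  - wrap an ARRAY_TYPE in a pointer-to-pointer (GOMP_MAP_POINTER
	  mappings of arrays).
     8  - key the splay tree by &DECL_UID (var) instead of the decl.
     16 - key by &DECL_NAME (var) and use the language hook's array
	  descriptor type (Fortran array descriptors).
     32 - combined with bit 1, suppress stripping the reference type of
	  VAR (task firstprivates with an allocate clause).  */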

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;
  delete ctx->allocate_map;

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
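
/* E.g. when a VLA 'int a[n]' is shared, the field for 'a' has a
   variably modified type referring to the parent's 'n'; the loop above
   rebuilds the record with each such field's type remapped to the
   child's copy of 'n', before giving receiver_decl its restrict-
   qualified pointer type.  (Illustrative case only.)  */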

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
	&& (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    /* omp_default_mem_alloc is 1 */
	    || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))))
      {
	if (ctx->allocate_map == NULL)
	  ctx->allocate_map = new hash_map<tree, tree>;
	ctx->allocate_map->put (OMP_CLAUSE_DECL (c),
				OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
				? OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
				: integer_zero_node);
      }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map && ctx->allocate_map->get (decl))
	    ctx->allocate_map->remove (decl);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  /* Collect 'reduction' clauses on OpenACC compute construct.  */
	  if (is_gimple_omp_oacc (ctx->stmt)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No 'reduction' clauses on OpenACC 'kernels'.  */
	      gcc_checking_assert (!is_oacc_kernels (ctx));
	      /* Likewise, on OpenACC 'kernels' decomposed parts.  */
	      gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	      ctx->local_reduction_clauses
		= tree_cons (NULL, c, ctx->local_reduction_clauses);
	    }
	  /* FALLTHRU */

	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map
	      && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		   && (OMP_CLAUSE_REDUCTION_INSCAN (c)
		       || OMP_CLAUSE_REDUCTION_TASK (c)))
		  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		  || is_task_ctx (ctx)))
	    {
	      /* For now.  */
	      if (ctx->allocate_map->get (decl))
		ctx->allocate_map->remove (decl);
	    }
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (is_omp_target (ctx->stmt))
		{
		  if (is_variable_sized (t))
		    {
		      gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
		      t = DECL_VALUE_EXPR (t);
		      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
		      t = TREE_OPERAND (t, 0);
		      gcc_assert (DECL_P (t));
		    }
		  tree at = t;
		  if (ctx->outer)
		    scan_omp_op (&at, ctx->outer);
		  tree nt = omp_copy_decl_1 (at, ctx);
		  splay_tree_insert (ctx->field_map,
				     (splay_tree_key) &DECL_CONTEXT (t),
				     (splay_tree_value) nt);
		  if (at != t)
		    splay_tree_insert (ctx->field_map,
				       (splay_tree_key) &DECL_CONTEXT (at),
				       (splay_tree_value) nt);
		  break;
		}
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_omp_target (ctx->stmt))
	    {
	      tree at = decl;
	      if (ctx->outer)
		scan_omp_op (&at, ctx->outer);
	      tree nt = omp_copy_decl_1 (at, ctx);
	      splay_tree_insert (ctx->field_map,
				 (splay_tree_key) &DECL_CONTEXT (decl),
				 (splay_tree_value) nt);
	      if (at != decl)
		splay_tree_insert (ctx->field_map,
				   (splay_tree_key) &DECL_CONTEXT (at),
				   (splay_tree_value) nt);
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		{
		  if (ctx->allocate_map
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		    {
		      /* For now.  */
		      if (ctx->allocate_map->get (decl))
			ctx->allocate_map->remove (decl);
		    }
		  install_var_field (decl, false, 1, ctx);
		}
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  if (ctx->allocate_map
		      && ctx->allocate_map->get (decl))
		    install_var_field (decl, by_ref, 32 | 1, ctx);
		  else
		    install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);

	  /* Fortran array descriptors.  */
	  if (lang_hooks.decls.omp_array_data (decl, true))
	    install_var_field (decl, false, 19, ctx);
	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		    && !omp_is_reference (decl)
		    && !omp_is_allocatable_or_ptr (decl))
		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_FILTER:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt))
	    {
	      /* If this is an offloaded region, an attach operation should
		 only exist when the pointer variable is mapped in a prior
		 clause.  */
	      if (is_gimple_omp_offloaded (ctx->stmt))
		gcc_assert
		  (maybe_lookup_decl (decl, ctx)
		   || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
		       && lookup_attribute ("omp declare target",
					    DECL_ATTRIBUTES (decl))));

	      /* By itself, attach/detach is generated as part of pointer
		 variable mapping and should not create new variables in the
		 offloaded region, however sender refs for it must be created
		 for its address to be passed to the runtime.  */
	      tree field
		= build_decl (OMP_CLAUSE_LOCATION (c),
			      FIELD_DECL, NULL_TREE, ptr_type_node);
	      SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
	      insert_field_into_struct (ctx->record_type, field);
	      /* To not clash with a map of the pointer variable itself,
		 attach/detach maps have their field looked up by the *clause*
		 tree expression, not the decl.  */
	      gcc_assert (!splay_tree_lookup (ctx->field_map,
					      (splay_tree_key) c));
	      splay_tree_insert (ctx->field_map, (splay_tree_key) c,
				 (splay_tree_value) field);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !(is_gimple_omp_oacc (ctx->stmt)
			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_BIND:
	  ctx->loop_p = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_ALLOCATE:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
	       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt)
	      && !is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_ALLOCATE:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}
    }
1890 gcc_checking_assert (!scan_array_reductions
1891 || !is_gimple_omp_oacc (ctx->stmt));
1892 if (scan_array_reductions)
1894 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1895 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1896 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1897 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1898 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1900 omp_context *rctx = ctx;
1901 if (is_omp_target (ctx->stmt))
1902 rctx = ctx->outer;
1903 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
1904 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
1906 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1907 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1908 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1909 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1910 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1911 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1915 /* Create a new name for the omp child function. Returns an identifier. */
1917 static tree
1918 create_omp_child_function_name (bool task_copy)
1920 return clone_function_name_numbered (current_function_decl,
1921 task_copy ? "_omp_cpyfn" : "_omp_fn");
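/* For example, cloning a host function `foo' yields child function
   names such as `foo._omp_fn.0' for parallel/task bodies and
   `foo._omp_cpyfn.1' for task firstprivate copy functions; the
   trailing number simply counts the clones created so far.  */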
1924 /* Return true if CTX may belong to offloaded code: either if current function
1925 is offloaded, or any enclosing context corresponds to a target region. */
1927 static bool
1928 omp_maybe_offloaded_ctx (omp_context *ctx)
1930 if (cgraph_node::get (current_function_decl)->offloadable)
1931 return true;
1932 for (; ctx; ctx = ctx->outer)
1933 if (is_gimple_omp_offloaded (ctx->stmt))
1934 return true;
1935 return false;
1938 /* Build a decl for the omp child function. It'll not contain a body
1939 yet, just the bare decl. */
1941 static void
1942 create_omp_child_function (omp_context *ctx, bool task_copy)
1944 tree decl, type, name, t;
1946 name = create_omp_child_function_name (task_copy);
1947 if (task_copy)
1948 type = build_function_type_list (void_type_node, ptr_type_node,
1949 ptr_type_node, NULL_TREE);
1950 else
1951 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1953 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1955 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1956 || !task_copy);
1957 if (!task_copy)
1958 ctx->cb.dst_fn = decl;
1959 else
1960 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1962 TREE_STATIC (decl) = 1;
1963 TREE_USED (decl) = 1;
1964 DECL_ARTIFICIAL (decl) = 1;
1965 DECL_IGNORED_P (decl) = 0;
1966 TREE_PUBLIC (decl) = 0;
1967 DECL_UNINLINABLE (decl) = 1;
1968 DECL_EXTERNAL (decl) = 0;
1969 DECL_CONTEXT (decl) = NULL_TREE;
1970 DECL_INITIAL (decl) = make_node (BLOCK);
1971 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1972 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1973 /* Remove omp declare simd attribute from the new attributes. */
1974 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1976 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1977 a = a2;
1978 a = TREE_CHAIN (a);
1979 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1980 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1981 *p = TREE_CHAIN (*p);
1982 else
1984 tree chain = TREE_CHAIN (*p);
1985 *p = copy_node (*p);
1986 p = &TREE_CHAIN (*p);
1987 *p = chain;
1990 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1991 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1992 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1993 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1994 DECL_FUNCTION_VERSIONED (decl)
1995 = DECL_FUNCTION_VERSIONED (current_function_decl);
1997 if (omp_maybe_offloaded_ctx (ctx))
1999 cgraph_node::get_create (decl)->offloadable = 1;
2000 if (ENABLE_OFFLOADING)
2001 g->have_offload = true;
2004 if (cgraph_node::get_create (decl)->offloadable)
2006 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
2007 ? "omp target entrypoint"
2008 : "omp declare target");
2009 if (lookup_attribute ("omp declare target",
2010 DECL_ATTRIBUTES (current_function_decl)))
2012 if (is_gimple_omp_offloaded (ctx->stmt))
2013 DECL_ATTRIBUTES (decl)
2014 = remove_attribute ("omp declare target",
2015 copy_list (DECL_ATTRIBUTES (decl)));
2016 else
2017 target_attr = NULL;
2019 if (target_attr)
2020 DECL_ATTRIBUTES (decl)
2021 = tree_cons (get_identifier (target_attr),
2022 NULL_TREE, DECL_ATTRIBUTES (decl));
2025 t = build_decl (DECL_SOURCE_LOCATION (decl),
2026 RESULT_DECL, NULL_TREE, void_type_node);
2027 DECL_ARTIFICIAL (t) = 1;
2028 DECL_IGNORED_P (t) = 1;
2029 DECL_CONTEXT (t) = decl;
2030 DECL_RESULT (decl) = t;
2032 tree data_name = get_identifier (".omp_data_i");
2033 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
2034 ptr_type_node);
2035 DECL_ARTIFICIAL (t) = 1;
2036 DECL_NAMELESS (t) = 1;
2037 DECL_ARG_TYPE (t) = ptr_type_node;
2038 DECL_CONTEXT (t) = current_function_decl;
2039 TREE_USED (t) = 1;
2040 TREE_READONLY (t) = 1;
2041 DECL_ARGUMENTS (decl) = t;
2042 if (!task_copy)
2043 ctx->receiver_decl = t;
2044 else
2046 t = build_decl (DECL_SOURCE_LOCATION (decl),
2047 PARM_DECL, get_identifier (".omp_data_o"),
2048 ptr_type_node);
2049 DECL_ARTIFICIAL (t) = 1;
2050 DECL_NAMELESS (t) = 1;
2051 DECL_ARG_TYPE (t) = ptr_type_node;
2052 DECL_CONTEXT (t) = current_function_decl;
2053 TREE_USED (t) = 1;
2054 TREE_ADDRESSABLE (t) = 1;
2055 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2056 DECL_ARGUMENTS (decl) = t;
2059 /* Allocate memory for the function structure. The call to
2060 allocate_struct_function clobbers CFUN, so we need to restore
2061 it afterward. */
2062 push_struct_function (decl);
2063 cfun->function_end_locus = gimple_location (ctx->stmt);
2064 init_tree_ssa (cfun);
2065 pop_cfun ();
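/* A sketch of the result for a region encountered in `foo':

     static void foo._omp_fn.0 (void *.omp_data_i);

   or, for a task copy function,

     static void foo._omp_cpyfn.1 (void *.omp_data_o, void *.omp_data_i);

   with .omp_data_o chained in front of .omp_data_i as above.  The new
   decl inherits the optimization and target options of the parent
   and, in possibly offloaded contexts, gets an "omp target entrypoint"
   or "omp declare target" attribute as computed above.  */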
2068 /* Callback for walk_gimple_seq. Check whether a combined parallel
2069 contains an OMP_FOR that is gimple_omp_for_combined_into_p. */
2071 tree
2072 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2073 bool *handled_ops_p,
2074 struct walk_stmt_info *wi)
2076 gimple *stmt = gsi_stmt (*gsi_p);
2078 *handled_ops_p = true;
2079 switch (gimple_code (stmt))
2081 WALK_SUBSTMTS;
2083 case GIMPLE_OMP_FOR:
2084 if (gimple_omp_for_combined_into_p (stmt)
2085 && gimple_omp_for_kind (stmt)
2086 == *(const enum gf_mask *) (wi->info))
2088 wi->info = stmt;
2089 return integer_zero_node;
2091 break;
2092 default:
2093 break;
2095 return NULL;
2098 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2100 static void
2101 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2102 omp_context *outer_ctx)
2104 struct walk_stmt_info wi;
2106 memset (&wi, 0, sizeof (wi));
2107 wi.val_only = true;
2108 wi.info = (void *) &msk;
2109 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2110 if (wi.info != (void *) &msk)
2112 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2113 struct omp_for_data fd;
2114 omp_extract_for_data (for_stmt, &fd, NULL);
2115 /* We need two temporaries with fd.iter_type (istart/iend)
2116 and then (fd.collapse - 1) temporaries of the same
2117 type for the count2 ... countN-1 vars if not constant. */
2118 size_t count = 2, i;
2119 tree type = fd.iter_type;
2120 if (fd.collapse > 1
2121 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2123 count += fd.collapse - 1;
2124 /* If there are lastprivate clauses on the inner
2125 GIMPLE_OMP_FOR, add one more temporary for the total number
2126 of iterations (product of count1 ... countN-1). */
2127 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2128 OMP_CLAUSE_LASTPRIVATE)
2129 || (msk == GF_OMP_FOR_KIND_FOR
2130 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2131 OMP_CLAUSE_LASTPRIVATE)))
2133 tree temp = create_tmp_var (type);
2134 tree c = build_omp_clause (UNKNOWN_LOCATION,
2135 OMP_CLAUSE__LOOPTEMP_);
2136 insert_decl_map (&outer_ctx->cb, temp, temp);
2137 OMP_CLAUSE_DECL (c) = temp;
2138 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2139 gimple_omp_taskreg_set_clauses (stmt, c);
2141 if (fd.non_rect
2142 && fd.last_nonrect == fd.first_nonrect + 1)
2143 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2144 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2146 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2147 tree type2 = TREE_TYPE (v);
2148 count++;
2149 for (i = 0; i < 3; i++)
2151 tree temp = create_tmp_var (type2);
2152 tree c = build_omp_clause (UNKNOWN_LOCATION,
2153 OMP_CLAUSE__LOOPTEMP_);
2154 insert_decl_map (&outer_ctx->cb, temp, temp);
2155 OMP_CLAUSE_DECL (c) = temp;
2156 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2157 gimple_omp_taskreg_set_clauses (stmt, c);
2161 for (i = 0; i < count; i++)
2163 tree temp = create_tmp_var (type);
2164 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2165 insert_decl_map (&outer_ctx->cb, temp, temp);
2166 OMP_CLAUSE_DECL (c) = temp;
2167 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2168 gimple_omp_taskreg_set_clauses (stmt, c);
2171 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2172 && omp_find_clause (gimple_omp_task_clauses (stmt),
2173 OMP_CLAUSE_REDUCTION))
2175 tree type = build_pointer_type (pointer_sized_int_node);
2176 tree temp = create_tmp_var (type);
2177 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2178 insert_decl_map (&outer_ctx->cb, temp, temp);
2179 OMP_CLAUSE_DECL (c) = temp;
2180 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2181 gimple_omp_task_set_clauses (stmt, c);
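/* For instance, given a combined construct such as

     #pragma omp parallel for collapse(2) lastprivate(x)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++) ...

   this adds _looptemp_ clauses for the istart/iend temporaries,
   (collapse - 1) more for the count2 ... countN-1 vars when the
   iteration counts are not compile-time constants, and, because of
   the lastprivate, one more for the total iteration count, so values
   computed by the libgomp loop routines can be communicated into the
   child function.  */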
2185 /* Scan an OpenMP parallel directive. */
2187 static void
2188 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2190 omp_context *ctx;
2191 tree name;
2192 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2194 /* Ignore parallel directives with empty bodies, unless there
2195 are copyin clauses. */
2196 if (optimize > 0
2197 && empty_body_p (gimple_omp_body (stmt))
2198 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2199 OMP_CLAUSE_COPYIN) == NULL)
2201 gsi_replace (gsi, gimple_build_nop (), false);
2202 return;
2205 if (gimple_omp_parallel_combined_p (stmt))
2206 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
2207 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2208 OMP_CLAUSE_REDUCTION);
2209 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2210 if (OMP_CLAUSE_REDUCTION_TASK (c))
2212 tree type = build_pointer_type (pointer_sized_int_node);
2213 tree temp = create_tmp_var (type);
2214 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2215 if (outer_ctx)
2216 insert_decl_map (&outer_ctx->cb, temp, temp);
2217 OMP_CLAUSE_DECL (c) = temp;
2218 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2219 gimple_omp_parallel_set_clauses (stmt, c);
2220 break;
2222 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2223 break;
2225 ctx = new_omp_context (stmt, outer_ctx);
2226 taskreg_contexts.safe_push (ctx);
2227 if (taskreg_nesting_level > 1)
2228 ctx->is_nested = true;
2229 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2230 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2231 name = create_tmp_var_name (".omp_data_s");
2232 name = build_decl (gimple_location (stmt),
2233 TYPE_DECL, name, ctx->record_type);
2234 DECL_ARTIFICIAL (name) = 1;
2235 DECL_NAMELESS (name) = 1;
2236 TYPE_NAME (ctx->record_type) = name;
2237 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2238 create_omp_child_function (ctx, false);
2239 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2241 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2242 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2244 if (TYPE_FIELDS (ctx->record_type) == NULL)
2245 ctx->record_type = ctx->receiver_decl = NULL;
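/* At this point, for something like

     #pragma omp parallel shared(a) firstprivate(b)

   ctx->record_type describes the .omp_data_s block through which `a'
   (by reference when use_pointer_for_field says so) and `b' (by value)
   are passed to the child function; when no clause required a field,
   the record and receiver decl are discarded just above.  */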
2248 /* Scan an OpenMP task directive. */
2250 static void
2251 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2253 omp_context *ctx;
2254 tree name, t;
2255 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2257 /* Ignore task directives with empty bodies, unless they have a depend
2258 clause. */
2259 if (optimize > 0
2260 && gimple_omp_body (stmt)
2261 && empty_body_p (gimple_omp_body (stmt))
2262 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2264 gsi_replace (gsi, gimple_build_nop (), false);
2265 return;
2268 if (gimple_omp_task_taskloop_p (stmt))
2269 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2271 ctx = new_omp_context (stmt, outer_ctx);
2273 if (gimple_omp_task_taskwait_p (stmt))
2275 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2276 return;
2279 taskreg_contexts.safe_push (ctx);
2280 if (taskreg_nesting_level > 1)
2281 ctx->is_nested = true;
2282 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2283 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2284 name = create_tmp_var_name (".omp_data_s");
2285 name = build_decl (gimple_location (stmt),
2286 TYPE_DECL, name, ctx->record_type);
2287 DECL_ARTIFICIAL (name) = 1;
2288 DECL_NAMELESS (name) = 1;
2289 TYPE_NAME (ctx->record_type) = name;
2290 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2291 create_omp_child_function (ctx, false);
2292 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2294 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2296 if (ctx->srecord_type)
2298 name = create_tmp_var_name (".omp_data_a");
2299 name = build_decl (gimple_location (stmt),
2300 TYPE_DECL, name, ctx->srecord_type);
2301 DECL_ARTIFICIAL (name) = 1;
2302 DECL_NAMELESS (name) = 1;
2303 TYPE_NAME (ctx->srecord_type) = name;
2304 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2305 create_omp_child_function (ctx, true);
2308 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2310 if (TYPE_FIELDS (ctx->record_type) == NULL)
2312 ctx->record_type = ctx->receiver_decl = NULL;
2313 t = build_int_cst (long_integer_type_node, 0);
2314 gimple_omp_task_set_arg_size (stmt, t);
2315 t = build_int_cst (long_integer_type_node, 1);
2316 gimple_omp_task_set_arg_align (stmt, t);
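/* Note the empty-record case just above: when no fields were needed,
   the task is given arg_size 0 and arg_align 1, i.e. the runtime has
   no data block to allocate or copy for it.  */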
2320 /* Helper function for finish_taskreg_scan, called through walk_tree.
2321 If maybe_lookup_decl_in_outer_ctx returns a different decl for some
2322 tree, replace it in the expression. */
2324 static tree
2325 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2327 if (VAR_P (*tp))
2329 omp_context *ctx = (omp_context *) data;
2330 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2331 if (t != *tp)
2333 if (DECL_HAS_VALUE_EXPR_P (t))
2334 t = unshare_expr (DECL_VALUE_EXPR (t));
2335 *tp = t;
2337 *walk_subtrees = 0;
2339 else if (IS_TYPE_OR_DECL_P (*tp))
2340 *walk_subtrees = 0;
2341 return NULL_TREE;
2344 /* If any decls have been made addressable during scan_omp,
2345 adjust their fields if needed, and layout record types
2346 of parallel/task constructs. */
2348 static void
2349 finish_taskreg_scan (omp_context *ctx)
2351 if (ctx->record_type == NULL_TREE)
2352 return;
2354 /* If any task_shared_vars were needed, check for all
2355 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2356 statements whether use_pointer_for_field has changed
2357 because of that. If it did, update the field types now. */
2358 if (task_shared_vars)
2360 tree c;
2362 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2363 c; c = OMP_CLAUSE_CHAIN (c))
2364 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2365 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2367 tree decl = OMP_CLAUSE_DECL (c);
2369 /* Global variables don't need to be copied,
2370 the receiver side will use them directly. */
2371 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2372 continue;
2373 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2374 || !use_pointer_for_field (decl, ctx))
2375 continue;
2376 tree field = lookup_field (decl, ctx);
2377 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2378 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2379 continue;
2380 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2381 TREE_THIS_VOLATILE (field) = 0;
2382 DECL_USER_ALIGN (field) = 0;
2383 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2384 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2385 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2386 if (ctx->srecord_type)
2388 tree sfield = lookup_sfield (decl, ctx);
2389 TREE_TYPE (sfield) = TREE_TYPE (field);
2390 TREE_THIS_VOLATILE (sfield) = 0;
2391 DECL_USER_ALIGN (sfield) = 0;
2392 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2393 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2394 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2399 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2401 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2402 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2403 if (c)
2405 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2406 expects to find it at the start of data. */
2407 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2408 tree *p = &TYPE_FIELDS (ctx->record_type);
2409 while (*p)
2410 if (*p == f)
2412 *p = DECL_CHAIN (*p);
2413 break;
2415 else
2416 p = &DECL_CHAIN (*p);
2417 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2418 TYPE_FIELDS (ctx->record_type) = f;
2420 layout_type (ctx->record_type);
2421 fixup_child_record_type (ctx);
2423 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2425 layout_type (ctx->record_type);
2426 fixup_child_record_type (ctx);
2428 else
2430 location_t loc = gimple_location (ctx->stmt);
2431 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2432 tree detach_clause
2433 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2434 OMP_CLAUSE_DETACH);
2435 /* Move VLA fields to the end. */
2436 p = &TYPE_FIELDS (ctx->record_type);
2437 while (*p)
2438 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2439 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2441 *q = *p;
2442 *p = TREE_CHAIN (*p);
2443 TREE_CHAIN (*q) = NULL_TREE;
2444 q = &TREE_CHAIN (*q);
2446 else
2447 p = &DECL_CHAIN (*p);
2448 *p = vla_fields;
2449 if (gimple_omp_task_taskloop_p (ctx->stmt))
2451 /* Move the fields corresponding to the first and second _looptemp_
2452 clauses first. These are filled by GOMP_taskloop
2453 and thus need to be in specific positions. */
2454 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2455 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2456 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2457 OMP_CLAUSE__LOOPTEMP_);
2458 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2459 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2460 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2461 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2462 p = &TYPE_FIELDS (ctx->record_type);
2463 while (*p)
2464 if (*p == f1 || *p == f2 || *p == f3)
2465 *p = DECL_CHAIN (*p);
2466 else
2467 p = &DECL_CHAIN (*p);
2468 DECL_CHAIN (f1) = f2;
2469 if (c3)
2471 DECL_CHAIN (f2) = f3;
2472 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2474 else
2475 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2476 TYPE_FIELDS (ctx->record_type) = f1;
2477 if (ctx->srecord_type)
2479 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2480 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2481 if (c3)
2482 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2483 p = &TYPE_FIELDS (ctx->srecord_type);
2484 while (*p)
2485 if (*p == f1 || *p == f2 || *p == f3)
2486 *p = DECL_CHAIN (*p);
2487 else
2488 p = &DECL_CHAIN (*p);
2489 DECL_CHAIN (f1) = f2;
2490 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2491 if (c3)
2493 DECL_CHAIN (f2) = f3;
2494 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2496 else
2497 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2498 TYPE_FIELDS (ctx->srecord_type) = f1;
2501 if (detach_clause)
2503 tree c, field;
2505 /* Look for a firstprivate clause with the detach event handle. */
2506 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2507 c; c = OMP_CLAUSE_CHAIN (c))
2509 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2510 continue;
2511 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2512 == OMP_CLAUSE_DECL (detach_clause))
2513 break;
2516 gcc_assert (c);
2517 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2519 /* Move field corresponding to the detach clause first.
2520 This is filled by GOMP_task and needs to be in a
2521 specific position. */
2522 p = &TYPE_FIELDS (ctx->record_type);
2523 while (*p)
2524 if (*p == field)
2525 *p = DECL_CHAIN (*p);
2526 else
2527 p = &DECL_CHAIN (*p);
2528 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2529 TYPE_FIELDS (ctx->record_type) = field;
2530 if (ctx->srecord_type)
2532 field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2533 p = &TYPE_FIELDS (ctx->srecord_type);
2534 while (*p)
2535 if (*p == field)
2536 *p = DECL_CHAIN (*p);
2537 else
2538 p = &DECL_CHAIN (*p);
2539 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2540 TYPE_FIELDS (ctx->srecord_type) = field;
2543 layout_type (ctx->record_type);
2544 fixup_child_record_type (ctx);
2545 if (ctx->srecord_type)
2546 layout_type (ctx->srecord_type);
2547 tree t = fold_convert_loc (loc, long_integer_type_node,
2548 TYPE_SIZE_UNIT (ctx->record_type));
2549 if (TREE_CODE (t) != INTEGER_CST)
2551 t = unshare_expr (t);
2552 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2554 gimple_omp_task_set_arg_size (ctx->stmt, t);
2555 t = build_int_cst (long_integer_type_node,
2556 TYPE_ALIGN_UNIT (ctx->record_type));
2557 gimple_omp_task_set_arg_align (ctx->stmt, t);
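/* The resulting task record layout is thus: the detach event handle
   field first (if any), then the two or three fields for the
   _looptemp_/_reductemp_ clauses of a taskloop, then the remaining
   fixed-size fields, and finally all variable-sized fields moved to
   the end.  */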
2561 /* Find the enclosing offload context. */
2563 static omp_context *
2564 enclosing_target_ctx (omp_context *ctx)
2566 for (; ctx; ctx = ctx->outer)
2567 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2568 break;
2570 return ctx;
2573 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2574 construct.
2575 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2577 static bool
2578 ctx_in_oacc_kernels_region (omp_context *ctx)
2580 for (;ctx != NULL; ctx = ctx->outer)
2582 gimple *stmt = ctx->stmt;
2583 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2584 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2585 return true;
2588 return false;
2591 /* Check the parallelism clauses inside an OpenACC 'kernels' region.
2592 (This doesn't include OpenACC 'kernels' decomposed parts.)
2593 Until kernels handling moves to use the same loop indirection
2594 scheme as parallel, we need to do this checking early. */
2596 static unsigned
2597 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2599 bool checking = true;
2600 unsigned outer_mask = 0;
2601 unsigned this_mask = 0;
2602 bool has_seq = false, has_auto = false;
2604 if (ctx->outer)
2605 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2606 if (!stmt)
2608 checking = false;
2609 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2610 return outer_mask;
2611 stmt = as_a <gomp_for *> (ctx->stmt);
2614 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2616 switch (OMP_CLAUSE_CODE (c))
2618 case OMP_CLAUSE_GANG:
2619 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2620 break;
2621 case OMP_CLAUSE_WORKER:
2622 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2623 break;
2624 case OMP_CLAUSE_VECTOR:
2625 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2626 break;
2627 case OMP_CLAUSE_SEQ:
2628 has_seq = true;
2629 break;
2630 case OMP_CLAUSE_AUTO:
2631 has_auto = true;
2632 break;
2633 default:
2634 break;
2638 if (checking)
2640 if (has_seq && (this_mask || has_auto))
2641 error_at (gimple_location (stmt), "%<seq%> overrides other"
2642 " OpenACC loop specifiers");
2643 else if (has_auto && this_mask)
2644 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2645 " OpenACC loop specifiers");
2647 if (this_mask & outer_mask)
2648 error_at (gimple_location (stmt), "inner loop uses same"
2649 " OpenACC parallelism as containing loop");
2652 return outer_mask | this_mask;
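/* For example, inside an OpenACC 'kernels' region this rejects

     #pragma acc loop gang
     for (...)
       #pragma acc loop gang
       for (...) ...

   with "inner loop uses same OpenACC parallelism as containing loop",
   because this_mask of the inner loop shares the GOMP_DIM_MASK
   (GOMP_DIM_GANG) bit with the outer_mask accumulated from the
   enclosing contexts.  */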
2655 /* Scan a GIMPLE_OMP_FOR. */
2657 static omp_context *
2658 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2660 omp_context *ctx;
2661 size_t i;
2662 tree clauses = gimple_omp_for_clauses (stmt);
2664 ctx = new_omp_context (stmt, outer_ctx);
2666 if (is_gimple_omp_oacc (stmt))
2668 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2670 if (!(tgt && is_oacc_kernels (tgt)))
2671 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2673 tree c_op0;
2674 switch (OMP_CLAUSE_CODE (c))
2676 case OMP_CLAUSE_GANG:
2677 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2678 break;
2680 case OMP_CLAUSE_WORKER:
2681 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2682 break;
2684 case OMP_CLAUSE_VECTOR:
2685 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2686 break;
2688 default:
2689 continue;
2692 if (c_op0)
2694 /* By construction, this is impossible for OpenACC 'kernels'
2695 decomposed parts. */
2696 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2698 error_at (OMP_CLAUSE_LOCATION (c),
2699 "argument not permitted on %qs clause",
2700 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2701 if (tgt)
2702 inform (gimple_location (tgt->stmt),
2703 "enclosing parent compute construct");
2704 else if (oacc_get_fn_attrib (current_function_decl))
2705 inform (DECL_SOURCE_LOCATION (current_function_decl),
2706 "enclosing routine");
2707 else
2708 gcc_unreachable ();
2712 if (tgt && is_oacc_kernels (tgt))
2713 check_oacc_kernel_gwv (stmt, ctx);
2715 /* Collect all variables named in reductions on this loop. Ensure
2716 that, if this loop has a reduction on some variable v, and there is
2717 a reduction on v somewhere in an outer context, then there is a
2718 reduction on v on all intervening loops as well. */
2719 tree local_reduction_clauses = NULL;
2720 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2722 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2723 local_reduction_clauses
2724 = tree_cons (NULL, c, local_reduction_clauses);
2726 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2727 ctx->outer_reduction_clauses
2728 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2729 ctx->outer->outer_reduction_clauses);
2730 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2731 tree local_iter = local_reduction_clauses;
2732 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2734 tree local_clause = TREE_VALUE (local_iter);
2735 tree local_var = OMP_CLAUSE_DECL (local_clause);
2736 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2737 bool have_outer_reduction = false;
2738 tree ctx_iter = outer_reduction_clauses;
2739 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2741 tree outer_clause = TREE_VALUE (ctx_iter);
2742 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2743 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2744 if (outer_var == local_var && outer_op != local_op)
2746 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2747 "conflicting reduction operations for %qE",
2748 local_var);
2749 inform (OMP_CLAUSE_LOCATION (outer_clause),
2750 "location of the previous reduction for %qE",
2751 outer_var);
2753 if (outer_var == local_var)
2755 have_outer_reduction = true;
2756 break;
2759 if (have_outer_reduction)
2761 /* There is a reduction on outer_var both on this loop and on
2762 some enclosing loop. Walk up the context tree until such a
2763 loop with a reduction on outer_var is found, and complain
2764 about all intervening loops that do not have such a
2765 reduction. */
2766 struct omp_context *curr_loop = ctx->outer;
2767 bool found = false;
2768 while (curr_loop != NULL)
2770 tree curr_iter = curr_loop->local_reduction_clauses;
2771 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2773 tree curr_clause = TREE_VALUE (curr_iter);
2774 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2775 if (curr_var == local_var)
2777 found = true;
2778 break;
2781 if (!found)
2782 warning_at (gimple_location (curr_loop->stmt), 0,
2783 "nested loop in reduction needs "
2784 "reduction clause for %qE",
2785 local_var);
2786 else
2787 break;
2788 curr_loop = curr_loop->outer;
2792 ctx->local_reduction_clauses = local_reduction_clauses;
2793 ctx->outer_reduction_clauses
2794 = chainon (unshare_expr (ctx->local_reduction_clauses),
2795 ctx->outer_reduction_clauses);
2797 if (tgt && is_oacc_kernels (tgt))
2799 /* Strip out reductions, as they are not handled yet. */
2800 tree *prev_ptr = &clauses;
2802 while (tree probe = *prev_ptr)
2804 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2806 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2807 *prev_ptr = *next_ptr;
2808 else
2809 prev_ptr = next_ptr;
2812 gimple_omp_for_set_clauses (stmt, clauses);
2816 scan_sharing_clauses (clauses, ctx);
2818 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2819 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2821 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2822 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2823 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2824 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2826 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2827 return ctx;
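/* The reduction bookkeeping above diagnoses, e.g.,

     #pragma acc loop reduction(+:sum)
     for (...)
       #pragma acc loop
       for (...)
         #pragma acc loop reduction(+:sum)
         for (...) ...

   where the middle loop gets "nested loop in reduction needs
   reduction clause for 'sum'", since it intervenes between two loops
   that both reduce `sum'.  */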
2830 /* Duplicate #pragma omp simd, one copy for SIMT and another for SIMD. */
2832 static void
2833 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2834 omp_context *outer_ctx)
2836 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2837 gsi_replace (gsi, bind, false);
2838 gimple_seq seq = NULL;
2839 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2840 tree cond = create_tmp_var_raw (integer_type_node);
2841 DECL_CONTEXT (cond) = current_function_decl;
2842 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2843 gimple_bind_set_vars (bind, cond);
2844 gimple_call_set_lhs (g, cond);
2845 gimple_seq_add_stmt (&seq, g);
2846 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2847 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2848 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2849 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2850 gimple_seq_add_stmt (&seq, g);
2851 g = gimple_build_label (lab1);
2852 gimple_seq_add_stmt (&seq, g);
2853 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2854 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2855 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2856 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2857 gimple_omp_for_set_clauses (new_stmt, clause);
2858 gimple_seq_add_stmt (&seq, new_stmt);
2859 g = gimple_build_goto (lab3);
2860 gimple_seq_add_stmt (&seq, g);
2861 g = gimple_build_label (lab2);
2862 gimple_seq_add_stmt (&seq, g);
2863 gimple_seq_add_stmt (&seq, stmt);
2864 g = gimple_build_label (lab3);
2865 gimple_seq_add_stmt (&seq, g);
2866 gimple_bind_set_body (bind, seq);
2867 update_stmt (bind);
2868 scan_omp_for (new_stmt, outer_ctx);
2869 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
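/* Roughly, the bind built above has the shape

     D.cond = .GOMP_USE_SIMT ();
     if (D.cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the simd loop with an added _simt_ clause>
           goto lab3;
     lab2: <the original simd loop>
     lab3:

   so a SIMT target can pick the SIMT variant at run time while other
   targets keep the plain SIMD loop.  */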
2872 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2873 struct walk_stmt_info *);
2874 static omp_context *maybe_lookup_ctx (gimple *);
2876 /* Duplicate #pragma omp simd, one copy for the scan input phase loop and one
2877 for the scan phase loop. */
2879 static void
2880 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2881 omp_context *outer_ctx)
2883 /* The only change between inclusive and exclusive scan will be
2884 within the first simd loop, so just use inclusive in the
2885 worksharing loop. */
2886 outer_ctx->scan_inclusive = true;
2887 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2888 OMP_CLAUSE_DECL (c) = integer_zero_node;
2890 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2891 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2892 gsi_replace (gsi, input_stmt, false);
2893 gimple_seq input_body = NULL;
2894 gimple_seq_add_stmt (&input_body, stmt);
2895 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2897 gimple_stmt_iterator input1_gsi = gsi_none ();
2898 struct walk_stmt_info wi;
2899 memset (&wi, 0, sizeof (wi));
2900 wi.val_only = true;
2901 wi.info = (void *) &input1_gsi;
2902 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2903 gcc_assert (!gsi_end_p (input1_gsi));
2905 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2906 gsi_next (&input1_gsi);
2907 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2908 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2909 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2910 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2911 std::swap (input_stmt1, scan_stmt1);
2913 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2914 gimple_omp_set_body (input_stmt1, NULL);
2916 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2917 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2919 gimple_omp_set_body (input_stmt1, input_body1);
2920 gimple_omp_set_body (scan_stmt1, NULL);
2922 gimple_stmt_iterator input2_gsi = gsi_none ();
2923 memset (&wi, 0, sizeof (wi));
2924 wi.val_only = true;
2925 wi.info = (void *) &input2_gsi;
2926 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2927 NULL, &wi);
2928 gcc_assert (!gsi_end_p (input2_gsi));
2930 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2931 gsi_next (&input2_gsi);
2932 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2933 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2934 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2935 std::swap (input_stmt2, scan_stmt2);
2937 gimple_omp_set_body (input_stmt2, NULL);
2939 gimple_omp_set_body (input_stmt, input_body);
2940 gimple_omp_set_body (scan_stmt, scan_body);
2942 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2943 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2945 ctx = new_omp_context (scan_stmt, outer_ctx);
2946 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2948 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
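/* In other words, a simd loop with an inscan reduction is split in
   two: the original loop becomes the input phase, wrapped in a
   GIMPLE_OMP_SCAN without clauses, and its copy becomes the scan
   phase, wrapped in a GIMPLE_OMP_SCAN with an inclusive clause; the
   first loop keeps only the half before the inner '#pragma omp scan'
   directive and the copy keeps only the half after it.  */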
2951 /* Scan an OpenMP sections directive. */
2953 static void
2954 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2956 omp_context *ctx;
2958 ctx = new_omp_context (stmt, outer_ctx);
2959 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2960 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2963 /* Scan an OpenMP single directive. */
2965 static void
2966 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2968 omp_context *ctx;
2969 tree name;
2971 ctx = new_omp_context (stmt, outer_ctx);
2972 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2973 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2974 name = create_tmp_var_name (".omp_copy_s");
2975 name = build_decl (gimple_location (stmt),
2976 TYPE_DECL, name, ctx->record_type);
2977 TYPE_NAME (ctx->record_type) = name;
2979 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2980 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2982 if (TYPE_FIELDS (ctx->record_type) == NULL)
2983 ctx->record_type = NULL;
2984 else
2985 layout_type (ctx->record_type);
2988 /* Scan a GIMPLE_OMP_TARGET. */
2990 static void
2991 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2993 omp_context *ctx;
2994 tree name;
2995 bool offloaded = is_gimple_omp_offloaded (stmt);
2996 tree clauses = gimple_omp_target_clauses (stmt);
2998 ctx = new_omp_context (stmt, outer_ctx);
2999 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3000 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3001 name = create_tmp_var_name (".omp_data_t");
3002 name = build_decl (gimple_location (stmt),
3003 TYPE_DECL, name, ctx->record_type);
3004 DECL_ARTIFICIAL (name) = 1;
3005 DECL_NAMELESS (name) = 1;
3006 TYPE_NAME (ctx->record_type) = name;
3007 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3009 if (offloaded)
3011 create_omp_child_function (ctx, false);
3012 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
3015 scan_sharing_clauses (clauses, ctx);
3016 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3018 if (TYPE_FIELDS (ctx->record_type) == NULL)
3019 ctx->record_type = ctx->receiver_decl = NULL;
3020 else
3022 TYPE_FIELDS (ctx->record_type)
3023 = nreverse (TYPE_FIELDS (ctx->record_type));
3024 if (flag_checking)
3026 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
3027 for (tree field = TYPE_FIELDS (ctx->record_type);
3028 field;
3029 field = DECL_CHAIN (field))
3030 gcc_assert (DECL_ALIGN (field) == align);
3032 layout_type (ctx->record_type);
3033 if (offloaded)
3034 fixup_child_record_type (ctx);
3037 if (ctx->teams_nested_p && ctx->nonteams_nested_p)
3039 error_at (gimple_location (stmt),
3040 "%<target%> construct with nested %<teams%> construct "
3041 "contains directives outside of the %<teams%> construct");
3042 gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
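/* The teams_nested_p/nonteams_nested_p pair, set during the nesting
   checks, catches mixtures like

     #pragma omp target
     {
       #pragma omp parallel ...     (sets nonteams_nested_p)
       #pragma omp teams ...        (sets teams_nested_p)
     }

   in which case the error above fires and the whole target body is
   replaced by an empty bind.  */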
3046 /* Scan an OpenMP teams directive. */
3048 static void
3049 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
3051 omp_context *ctx = new_omp_context (stmt, outer_ctx);
3053 if (!gimple_omp_teams_host (stmt))
3055 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3056 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3057 return;
3059 taskreg_contexts.safe_push (ctx);
3060 gcc_assert (taskreg_nesting_level == 1);
3061 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3062 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3063 tree name = create_tmp_var_name (".omp_data_s");
3064 name = build_decl (gimple_location (stmt),
3065 TYPE_DECL, name, ctx->record_type);
3066 DECL_ARTIFICIAL (name) = 1;
3067 DECL_NAMELESS (name) = 1;
3068 TYPE_NAME (ctx->record_type) = name;
3069 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3070 create_omp_child_function (ctx, false);
3071 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
3073 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3074 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3076 if (TYPE_FIELDS (ctx->record_type) == NULL)
3077 ctx->record_type = ctx->receiver_decl = NULL;
3080 /* Check nesting restrictions. */
3081 static bool
3082 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
3084 tree c;
3086 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3087 inside an OpenACC CTX. */
3088 if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3089 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
3090 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3092 else if (!(is_gimple_omp (stmt)
3093 && is_gimple_omp_oacc (stmt)))
3095 if (oacc_get_fn_attrib (cfun->decl) != NULL)
3097 error_at (gimple_location (stmt),
3098 "non-OpenACC construct inside of OpenACC routine");
3099 return false;
3101 else
3102 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
3103 if (is_gimple_omp (octx->stmt)
3104 && is_gimple_omp_oacc (octx->stmt))
3106 error_at (gimple_location (stmt),
3107 "non-OpenACC construct inside of OpenACC region");
3108 return false;
3112 if (ctx != NULL)
3114 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
3115 && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
3117 if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
3118 ctx->teams_nested_p = true;
3119 else
3120 ctx->nonteams_nested_p = true;
3122 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
3123 && ctx->outer
3124 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
3125 ctx = ctx->outer;
3126 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3127 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3128 && !ctx->loop_p)
3130 c = NULL_TREE;
3131 if (ctx->order_concurrent
3132 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
3133 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3134 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3136 error_at (gimple_location (stmt),
3137 "OpenMP constructs other than %<parallel%>, %<loop%>"
3138 " or %<simd%> may not be nested inside a region with"
3139 " the %<order(concurrent)%> clause");
3140 return false;
3142 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
3144 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3145 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3147 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
3148 && (ctx->outer == NULL
3149 || !gimple_omp_for_combined_into_p (ctx->stmt)
3150 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
3151 || (gimple_omp_for_kind (ctx->outer->stmt)
3152 != GF_OMP_FOR_KIND_FOR)
3153 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
3155 error_at (gimple_location (stmt),
3156 "%<ordered simd threads%> must be closely "
3157 "nested inside of %<%s simd%> region",
3158 lang_GNU_Fortran () ? "do" : "for");
3159 return false;
3161 return true;
3164 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3165 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
3166 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
3167 return true;
3168 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
3169 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3170 return true;
3171 error_at (gimple_location (stmt),
3172 "OpenMP constructs other than "
3173 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3174 "not be nested inside %<simd%> region");
3175 return false;
3177 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3179 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
3180 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
3181 && omp_find_clause (gimple_omp_for_clauses (stmt),
3182 OMP_CLAUSE_BIND) == NULL_TREE))
3183 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
3185 error_at (gimple_location (stmt),
3186 "only %<distribute%>, %<parallel%> or %<loop%> "
3187 "regions are allowed to be strictly nested inside "
3188 "%<teams%> region");
3189 return false;
3192 else if (ctx->order_concurrent
3193 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3194 && (gimple_code (stmt) != GIMPLE_OMP_FOR
3195 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3196 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3198 if (ctx->loop_p)
3199 error_at (gimple_location (stmt),
3200 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3201 "%<simd%> may not be nested inside a %<loop%> region");
3202 else
3203 error_at (gimple_location (stmt),
3204 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3205 "%<simd%> may not be nested inside a region with "
3206 "the %<order(concurrent)%> clause");
3207 return false;
3210 switch (gimple_code (stmt))
3212 case GIMPLE_OMP_FOR:
3213 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3214 return true;
3215 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3217 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3219 error_at (gimple_location (stmt),
3220 "%<distribute%> region must be strictly nested "
3221 "inside %<teams%> construct");
3222 return false;
3224 return true;
3226 /* We split taskloop into a task with a nested taskloop in it. */
3227 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3228 return true;
3229 /* For now, hope this will change and loop bind(parallel) will not
3230 be allowed in lots of contexts. */
3231 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3232 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3233 return true;
3234 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3236 bool ok = false;
3238 if (ctx)
3239 switch (gimple_code (ctx->stmt))
3241 case GIMPLE_OMP_FOR:
3242 ok = (gimple_omp_for_kind (ctx->stmt)
3243 == GF_OMP_FOR_KIND_OACC_LOOP);
3244 break;
3246 case GIMPLE_OMP_TARGET:
3247 switch (gimple_omp_target_kind (ctx->stmt))
3249 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3250 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3251 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3252 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3253 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3254 ok = true;
3255 break;
3257 default:
3258 break;
3261 default:
3262 break;
3264 else if (oacc_get_fn_attrib (current_function_decl))
3265 ok = true;
3266 if (!ok)
3268 error_at (gimple_location (stmt),
3269 "OpenACC loop directive must be associated with"
3270 " an OpenACC compute region");
3271 return false;
3274 /* FALLTHRU */
3275 case GIMPLE_CALL:
3276 if (is_gimple_call (stmt)
3277 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3278 == BUILT_IN_GOMP_CANCEL
3279 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3280 == BUILT_IN_GOMP_CANCELLATION_POINT))
3282 const char *bad = NULL;
3283 const char *kind = NULL;
3284 const char *construct
3285 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3286 == BUILT_IN_GOMP_CANCEL)
3287 ? "cancel"
3288 : "cancellation point";
3289 if (ctx == NULL)
3291 error_at (gimple_location (stmt), "orphaned %qs construct",
3292 construct);
3293 return false;
3295 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3296 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3297 : 0)
3299 case 1:
3300 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3301 bad = "parallel";
3302 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3303 == BUILT_IN_GOMP_CANCEL
3304 && !integer_zerop (gimple_call_arg (stmt, 1)))
3305 ctx->cancellable = true;
3306 kind = "parallel";
3307 break;
3308 case 2:
3309 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3310 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3311 bad = "for";
3312 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3313 == BUILT_IN_GOMP_CANCEL
3314 && !integer_zerop (gimple_call_arg (stmt, 1)))
3316 ctx->cancellable = true;
3317 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3318 OMP_CLAUSE_NOWAIT))
3319 warning_at (gimple_location (stmt), 0,
3320 "%<cancel for%> inside "
3321 "%<nowait%> for construct");
3322 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3323 OMP_CLAUSE_ORDERED))
3324 warning_at (gimple_location (stmt), 0,
3325 "%<cancel for%> inside "
3326 "%<ordered%> for construct");
3328 kind = "for";
3329 break;
3330 case 4:
3331 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3332 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3333 bad = "sections";
3334 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3335 == BUILT_IN_GOMP_CANCEL
3336 && !integer_zerop (gimple_call_arg (stmt, 1)))
3338 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3340 ctx->cancellable = true;
3341 if (omp_find_clause (gimple_omp_sections_clauses
3342 (ctx->stmt),
3343 OMP_CLAUSE_NOWAIT))
3344 warning_at (gimple_location (stmt), 0,
3345 "%<cancel sections%> inside "
3346 "%<nowait%> sections construct");
3348 else
3350 gcc_assert (ctx->outer
3351 && gimple_code (ctx->outer->stmt)
3352 == GIMPLE_OMP_SECTIONS);
3353 ctx->outer->cancellable = true;
3354 if (omp_find_clause (gimple_omp_sections_clauses
3355 (ctx->outer->stmt),
3356 OMP_CLAUSE_NOWAIT))
3357 warning_at (gimple_location (stmt), 0,
3358 "%<cancel sections%> inside "
3359 "%<nowait%> sections construct");
3362 kind = "sections";
3363 break;
3364 case 8:
3365 if (!is_task_ctx (ctx)
3366 && (!is_taskloop_ctx (ctx)
3367 || ctx->outer == NULL
3368 || !is_task_ctx (ctx->outer)))
3369 bad = "task";
3370 else
3372 for (omp_context *octx = ctx->outer;
3373 octx; octx = octx->outer)
3375 switch (gimple_code (octx->stmt))
3377 case GIMPLE_OMP_TASKGROUP:
3378 break;
3379 case GIMPLE_OMP_TARGET:
3380 if (gimple_omp_target_kind (octx->stmt)
3381 != GF_OMP_TARGET_KIND_REGION)
3382 continue;
3383 /* FALLTHRU */
3384 case GIMPLE_OMP_PARALLEL:
3385 case GIMPLE_OMP_TEAMS:
3386 error_at (gimple_location (stmt),
3387 "%<%s taskgroup%> construct not closely "
3388 "nested inside of %<taskgroup%> region",
3389 construct);
3390 return false;
3391 case GIMPLE_OMP_TASK:
3392 if (gimple_omp_task_taskloop_p (octx->stmt)
3393 && octx->outer
3394 && is_taskloop_ctx (octx->outer))
3396 tree clauses
3397 = gimple_omp_for_clauses (octx->outer->stmt);
3398 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3399 break;
3401 continue;
3402 default:
3403 continue;
3405 break;
3407 ctx->cancellable = true;
3409 kind = "taskgroup";
3410 break;
3411 default:
3412 error_at (gimple_location (stmt), "invalid arguments");
3413 return false;
3415 if (bad)
3417 error_at (gimple_location (stmt),
3418 "%<%s %s%> construct not closely nested inside of %qs",
3419 construct, kind, bad);
3420 return false;
3423 /* FALLTHRU */
3424 case GIMPLE_OMP_SECTIONS:
3425 case GIMPLE_OMP_SINGLE:
3426 for (; ctx != NULL; ctx = ctx->outer)
3427 switch (gimple_code (ctx->stmt))
3429 case GIMPLE_OMP_FOR:
3430 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3431 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3432 break;
3433 /* FALLTHRU */
3434 case GIMPLE_OMP_SECTIONS:
3435 case GIMPLE_OMP_SINGLE:
3436 case GIMPLE_OMP_ORDERED:
3437 case GIMPLE_OMP_MASTER:
3438 case GIMPLE_OMP_MASKED:
3439 case GIMPLE_OMP_TASK:
3440 case GIMPLE_OMP_CRITICAL:
3441 if (is_gimple_call (stmt))
3443 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3444 != BUILT_IN_GOMP_BARRIER)
3445 return true;
3446 error_at (gimple_location (stmt),
3447 "barrier region may not be closely nested inside "
3448 "of work-sharing, %<loop%>, %<critical%>, "
3449 "%<ordered%>, %<master%>, %<masked%>, explicit "
3450 "%<task%> or %<taskloop%> region");
3451 return false;
3453 error_at (gimple_location (stmt),
3454 "work-sharing region may not be closely nested inside "
3455 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3456 "%<master%>, %<masked%>, explicit %<task%> or "
3457 "%<taskloop%> region");
3458 return false;
3459 case GIMPLE_OMP_PARALLEL:
3460 case GIMPLE_OMP_TEAMS:
3461 return true;
3462 case GIMPLE_OMP_TARGET:
3463 if (gimple_omp_target_kind (ctx->stmt)
3464 == GF_OMP_TARGET_KIND_REGION)
3465 return true;
3466 break;
3467 default:
3468 break;
3470 break;
3471 case GIMPLE_OMP_MASTER:
3472 case GIMPLE_OMP_MASKED:
3473 for (; ctx != NULL; ctx = ctx->outer)
3474 switch (gimple_code (ctx->stmt))
3476 case GIMPLE_OMP_FOR:
3477 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3478 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3479 break;
3480 /* FALLTHRU */
3481 case GIMPLE_OMP_SECTIONS:
3482 case GIMPLE_OMP_SINGLE:
3483 case GIMPLE_OMP_TASK:
3484 error_at (gimple_location (stmt),
3485 "%qs region may not be closely nested inside "
3486 "of work-sharing, %<loop%>, explicit %<task%> or "
3487 "%<taskloop%> region",
3488 gimple_code (stmt) == GIMPLE_OMP_MASTER
3489 ? "master" : "masked");
3490 return false;
3491 case GIMPLE_OMP_PARALLEL:
3492 case GIMPLE_OMP_TEAMS:
3493 return true;
3494 case GIMPLE_OMP_TARGET:
3495 if (gimple_omp_target_kind (ctx->stmt)
3496 == GF_OMP_TARGET_KIND_REGION)
3497 return true;
3498 break;
3499 default:
3500 break;
3502 break;
3503 case GIMPLE_OMP_SCOPE:
3504 for (; ctx != NULL; ctx = ctx->outer)
3505 switch (gimple_code (ctx->stmt))
3507 case GIMPLE_OMP_FOR:
3508 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3509 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3510 break;
3511 /* FALLTHRU */
3512 case GIMPLE_OMP_SECTIONS:
3513 case GIMPLE_OMP_SINGLE:
3514 case GIMPLE_OMP_TASK:
3515 case GIMPLE_OMP_CRITICAL:
3516 case GIMPLE_OMP_ORDERED:
3517 case GIMPLE_OMP_MASTER:
3518 case GIMPLE_OMP_MASKED:
3519 error_at (gimple_location (stmt),
3520 "%<scope%> region may not be closely nested inside "
3521 "of work-sharing, %<loop%>, explicit %<task%>, "
3522 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3523 "or %<masked%> region");
3524 return false;
3525 case GIMPLE_OMP_PARALLEL:
3526 case GIMPLE_OMP_TEAMS:
3527 return true;
3528 case GIMPLE_OMP_TARGET:
3529 if (gimple_omp_target_kind (ctx->stmt)
3530 == GF_OMP_TARGET_KIND_REGION)
3531 return true;
3532 break;
3533 default:
3534 break;
3536 break;
3537 case GIMPLE_OMP_TASK:
3538 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3539 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3540 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3541 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3543 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3544 error_at (OMP_CLAUSE_LOCATION (c),
3545 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3546 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3547 return false;
3549 break;
3550 case GIMPLE_OMP_ORDERED:
3551 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3552 c; c = OMP_CLAUSE_CHAIN (c))
3554 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3556 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3557 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3558 continue;
3560 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3561 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3562 || kind == OMP_CLAUSE_DEPEND_SINK)
3564 tree oclause;
3565 /* Look for containing ordered(N) loop. */
3566 if (ctx == NULL
3567 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3568 || (oclause
3569 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3570 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3572 error_at (OMP_CLAUSE_LOCATION (c),
3573 "%<ordered%> construct with %<depend%> clause "
3574 "must be closely nested inside an %<ordered%> "
3575 "loop");
3576 return false;
3578 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3580 error_at (OMP_CLAUSE_LOCATION (c),
3581 "%<ordered%> construct with %<depend%> clause "
3582 "must be closely nested inside a loop with "
3583 "%<ordered%> clause with a parameter");
3584 return false;
3587 else
3589 error_at (OMP_CLAUSE_LOCATION (c),
3590 "invalid depend kind in omp %<ordered%> %<depend%>");
3591 return false;
3594 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3595 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3597 /* An ordered simd must be closely nested inside of a simd region,
3598 and a simd region must not encounter constructs other than
3599 ordered simd, therefore an ordered simd may either be orphaned,
3600 or ctx->stmt must be a simd. The latter case has already been
3601 handled earlier. */
3602 if (ctx != NULL)
3604 error_at (gimple_location (stmt),
3605 "%<ordered%> %<simd%> must be closely nested inside "
3606 "%<simd%> region");
3607 return false;
3610 for (; ctx != NULL; ctx = ctx->outer)
3611 switch (gimple_code (ctx->stmt))
3613 case GIMPLE_OMP_CRITICAL:
3614 case GIMPLE_OMP_TASK:
3615 case GIMPLE_OMP_ORDERED:
3616 ordered_in_taskloop:
3617 error_at (gimple_location (stmt),
3618 "%<ordered%> region may not be closely nested inside "
3619 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3620 "%<taskloop%> region");
3621 return false;
3622 case GIMPLE_OMP_FOR:
3623 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3624 goto ordered_in_taskloop;
3625 tree o;
3626 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3627 OMP_CLAUSE_ORDERED);
3628 if (o == NULL)
3630 error_at (gimple_location (stmt),
3631 "%<ordered%> region must be closely nested inside "
3632 "a loop region with an %<ordered%> clause");
3633 return false;
3635 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3636 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3638 error_at (gimple_location (stmt),
3639 "%<ordered%> region without %<depend%> clause may "
3640 "not be closely nested inside a loop region with "
3641 "an %<ordered%> clause with a parameter");
3642 return false;
3644 return true;
3645 case GIMPLE_OMP_TARGET:
3646 if (gimple_omp_target_kind (ctx->stmt)
3647 != GF_OMP_TARGET_KIND_REGION)
3648 break;
3649 /* FALLTHRU */
3650 case GIMPLE_OMP_PARALLEL:
3651 case GIMPLE_OMP_TEAMS:
3652 error_at (gimple_location (stmt),
3653 "%<ordered%> region must be closely nested inside "
3654 "a loop region with an %<ordered%> clause");
3655 return false;
3656 default:
3657 break;
3659 break;
3660 case GIMPLE_OMP_CRITICAL:
3662 tree this_stmt_name
3663 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3664 for (; ctx != NULL; ctx = ctx->outer)
3665 if (gomp_critical *other_crit
3666 = dyn_cast <gomp_critical *> (ctx->stmt))
3667 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3669 error_at (gimple_location (stmt),
3670 "%<critical%> region may not be nested inside "
3671 "a %<critical%> region with the same name");
3672 return false;
3675 break;
3676 case GIMPLE_OMP_TEAMS:
3677 if (ctx == NULL)
3678 break;
3679 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3680 || (gimple_omp_target_kind (ctx->stmt)
3681 != GF_OMP_TARGET_KIND_REGION))
3683 /* Teams construct can appear either strictly nested inside of
3684 target construct with no intervening stmts, or can be encountered
3685 only by initial task (so must not appear inside any OpenMP
3686 construct). */
3687 error_at (gimple_location (stmt),
3688 "%<teams%> construct must be closely nested inside of "
3689 "%<target%> construct or not nested in any OpenMP "
3690 "construct");
3691 return false;
3693 break;
3694 case GIMPLE_OMP_TARGET:
3695 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3696 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3697 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3698 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3700 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3701 error_at (OMP_CLAUSE_LOCATION (c),
3702 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3703 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3704 return false;
3706 if (is_gimple_omp_offloaded (stmt)
3707 && oacc_get_fn_attrib (cfun->decl) != NULL)
3709 error_at (gimple_location (stmt),
3710 "OpenACC region inside of OpenACC routine, nested "
3711 "parallelism not supported yet");
3712 return false;
3714 for (; ctx != NULL; ctx = ctx->outer)
3716 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3718 if (is_gimple_omp (stmt)
3719 && is_gimple_omp_oacc (stmt)
3720 && is_gimple_omp (ctx->stmt))
3722 error_at (gimple_location (stmt),
3723 "OpenACC construct inside of non-OpenACC region");
3724 return false;
3726 continue;
3729 const char *stmt_name, *ctx_stmt_name;
3730 switch (gimple_omp_target_kind (stmt))
3732 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3733 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3734 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3735 case GF_OMP_TARGET_KIND_ENTER_DATA:
3736 stmt_name = "target enter data"; break;
3737 case GF_OMP_TARGET_KIND_EXIT_DATA:
3738 stmt_name = "target exit data"; break;
3739 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3740 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3741 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3742 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3743 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3744 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
3745 stmt_name = "enter data"; break;
3746 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
3747 stmt_name = "exit data"; break;
3748 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3749 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3750 break;
3751 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3752 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3753 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3754 /* OpenACC 'kernels' decomposed parts. */
3755 stmt_name = "kernels"; break;
3756 default: gcc_unreachable ();
3758 switch (gimple_omp_target_kind (ctx->stmt))
3760 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3761 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3762 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3763 ctx_stmt_name = "parallel"; break;
3764 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3765 ctx_stmt_name = "kernels"; break;
3766 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3767 ctx_stmt_name = "serial"; break;
3768 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3769 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3770 ctx_stmt_name = "host_data"; break;
3771 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3772 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3773 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3774 /* OpenACC 'kernels' decomposed parts. */
3775 ctx_stmt_name = "kernels"; break;
3776 default: gcc_unreachable ();
3779 /* OpenACC/OpenMP mismatch? */
3780 if (is_gimple_omp_oacc (stmt)
3781 != is_gimple_omp_oacc (ctx->stmt))
3783 error_at (gimple_location (stmt),
3784 "%s %qs construct inside of %s %qs region",
3785 (is_gimple_omp_oacc (stmt)
3786 ? "OpenACC" : "OpenMP"), stmt_name,
3787 (is_gimple_omp_oacc (ctx->stmt)
3788 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3789 return false;
3791 if (is_gimple_omp_offloaded (ctx->stmt))
3793 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3794 if (is_gimple_omp_oacc (ctx->stmt))
3796 error_at (gimple_location (stmt),
3797 "%qs construct inside of %qs region",
3798 stmt_name, ctx_stmt_name);
3799 return false;
3801 else
3803 warning_at (gimple_location (stmt), 0,
3804 "%qs construct inside of %qs region",
3805 stmt_name, ctx_stmt_name);
3809 break;
3810 default:
3811 break;
3813 return true;
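/* For illustration only (not used by the pass): a minimal user-level
   sketch of one nesting that the checks above reject; the
   GIMPLE_OMP_CRITICAL case diagnoses it:

       void f (void)
       {
       #pragma omp critical
	 {
	 #pragma omp ordered	<-- error: an ordered region may not be
	   ;			    closely nested inside critical
	 }
       }
*/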
3817 /* Helper function for scan_omp.
3819 Callback for walk_tree or operators in walk_gimple_stmt used to
3820 scan for OMP directives in TP. */
3822 static tree
3823 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3825 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3826 omp_context *ctx = (omp_context *) wi->info;
3827 tree t = *tp;
3829 switch (TREE_CODE (t))
3831 case VAR_DECL:
3832 case PARM_DECL:
3833 case LABEL_DECL:
3834 case RESULT_DECL:
3835 if (ctx)
3837 tree repl = remap_decl (t, &ctx->cb);
3838 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3839 *tp = repl;
3841 break;
3843 default:
3844 if (ctx && TYPE_P (t))
3845 *tp = remap_type (t, &ctx->cb);
3846 else if (!DECL_P (t))
3848 *walk_subtrees = 1;
3849 if (ctx)
3851 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3852 if (tem != TREE_TYPE (t))
3854 if (TREE_CODE (t) == INTEGER_CST)
3855 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3856 else
3857 TREE_TYPE (t) = tem;
3861 break;
3864 return NULL_TREE;
3867 /* Return true if FNDECL is a setjmp or a longjmp. */
3869 static bool
3870 setjmp_or_longjmp_p (const_tree fndecl)
3872 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3873 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3874 return true;
3876 tree declname = DECL_NAME (fndecl);
3877 if (!declname
3878 || (DECL_CONTEXT (fndecl) != NULL_TREE
3879 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3880 || !TREE_PUBLIC (fndecl))
3881 return false;
3883 const char *name = IDENTIFIER_POINTER (declname);
3884 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
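/* Illustrative examples (not exercised here): besides the builtins,
   the check above matches a plain file-scope declaration such as
     extern int setjmp (jmp_buf);
   but not, say, a class member or namespace-scope C++ function that
   merely happens to be named setjmp, since its DECL_CONTEXT is not a
   TRANSLATION_UNIT_DECL. */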
3887 /* Return true if FNDECL is an omp_* runtime API call. */
3889 static bool
3890 omp_runtime_api_call (const_tree fndecl)
3892 tree declname = DECL_NAME (fndecl);
3893 if (!declname
3894 || (DECL_CONTEXT (fndecl) != NULL_TREE
3895 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3896 || !TREE_PUBLIC (fndecl))
3897 return false;
3899 const char *name = IDENTIFIER_POINTER (declname);
3900 if (!startswith (name, "omp_"))
3901 return false;
3903 static const char *omp_runtime_apis[] =
3905 /* This array has 3 sections. First omp_* calls that don't
3906 have any suffixes. */
3907 "omp_alloc",
3908 "omp_free",
3909 "target_alloc",
3910 "target_associate_ptr",
3911 "target_disassociate_ptr",
3912 "target_free",
3913 "target_is_present",
3914 "target_memcpy",
3915 "target_memcpy_rect",
3916 NULL,
3917 /* Now omp_* calls that are available as omp_* and omp_*_. */
3918 "capture_affinity",
3919 "destroy_allocator",
3920 "destroy_lock",
3921 "destroy_nest_lock",
3922 "display_affinity",
3923 "fulfill_event",
3924 "get_active_level",
3925 "get_affinity_format",
3926 "get_cancellation",
3927 "get_default_allocator",
3928 "get_default_device",
3929 "get_device_num",
3930 "get_dynamic",
3931 "get_initial_device",
3932 "get_level",
3933 "get_max_active_levels",
3934 "get_max_task_priority",
3935 "get_max_threads",
3936 "get_nested",
3937 "get_num_devices",
3938 "get_num_places",
3939 "get_num_procs",
3940 "get_num_teams",
3941 "get_num_threads",
3942 "get_partition_num_places",
3943 "get_place_num",
3944 "get_proc_bind",
3945 "get_supported_active_levels",
3946 "get_team_num",
3947 "get_thread_limit",
3948 "get_thread_num",
3949 "get_wtick",
3950 "get_wtime",
3951 "in_final",
3952 "in_parallel",
3953 "init_lock",
3954 "init_nest_lock",
3955 "is_initial_device",
3956 "pause_resource",
3957 "pause_resource_all",
3958 "set_affinity_format",
3959 "set_default_allocator",
3960 "set_lock",
3961 "set_nest_lock",
3962 "test_lock",
3963 "test_nest_lock",
3964 "unset_lock",
3965 "unset_nest_lock",
3966 NULL,
3967 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3968 "display_env",
3969 "get_ancestor_thread_num",
3970 "init_allocator",
3971 "get_partition_place_nums",
3972 "get_place_num_procs",
3973 "get_place_proc_ids",
3974 "get_schedule",
3975 "get_team_size",
3976 "set_default_device",
3977 "set_dynamic",
3978 "set_max_active_levels",
3979 "set_nested",
3980 "set_num_threads",
3981 "set_schedule"
3984 int mode = 0;
3985 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3987 if (omp_runtime_apis[i] == NULL)
3989 mode++;
3990 continue;
3992 size_t len = strlen (omp_runtime_apis[i]);
3993 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3994 && (name[4 + len] == '\0'
3995 || (mode > 0
3996 && name[4 + len] == '_'
3997 && (name[4 + len + 1] == '\0'
3998 || (mode > 1
3999 && strcmp (name + 4 + len + 1, "8_") == 0)))))
4000 return true;
4002 return false;
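/* Examples of the suffix matching above (illustrative): "omp_alloc"
   from the first section matches only exactly; "omp_set_lock" from the
   second section also matches the Fortran variant "omp_set_lock_"; and
   "omp_set_num_threads" from the third section matches
   "omp_set_num_threads", "omp_set_num_threads_" and
   "omp_set_num_threads_8_". */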
4005 /* Helper function for scan_omp.
4007 Callback for walk_gimple_stmt used to scan for OMP directives in
4008 the current statement in GSI. */
4010 static tree
4011 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
4012 struct walk_stmt_info *wi)
4014 gimple *stmt = gsi_stmt (*gsi);
4015 omp_context *ctx = (omp_context *) wi->info;
4017 if (gimple_has_location (stmt))
4018 input_location = gimple_location (stmt);
4020 /* Check the nesting restrictions. */
4021 bool remove = false;
4022 if (is_gimple_omp (stmt))
4023 remove = !check_omp_nesting_restrictions (stmt, ctx);
4024 else if (is_gimple_call (stmt))
4026 tree fndecl = gimple_call_fndecl (stmt);
4027 if (fndecl)
4029 if (ctx
4030 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4031 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
4032 && setjmp_or_longjmp_p (fndecl)
4033 && !ctx->loop_p)
4035 remove = true;
4036 error_at (gimple_location (stmt),
4037 "setjmp/longjmp inside %<simd%> construct");
4039 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4040 switch (DECL_FUNCTION_CODE (fndecl))
4042 case BUILT_IN_GOMP_BARRIER:
4043 case BUILT_IN_GOMP_CANCEL:
4044 case BUILT_IN_GOMP_CANCELLATION_POINT:
4045 case BUILT_IN_GOMP_TASKYIELD:
4046 case BUILT_IN_GOMP_TASKWAIT:
4047 case BUILT_IN_GOMP_TASKGROUP_START:
4048 case BUILT_IN_GOMP_TASKGROUP_END:
4049 remove = !check_omp_nesting_restrictions (stmt, ctx);
4050 break;
4051 default:
4052 break;
4054 else if (ctx)
4056 omp_context *octx = ctx;
4057 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
4058 octx = ctx->outer;
4059 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
4061 remove = true;
4062 error_at (gimple_location (stmt),
4063 "OpenMP runtime API call %qD in a region with "
4064 "%<order(concurrent)%> clause", fndecl);
4069 if (remove)
4071 stmt = gimple_build_nop ();
4072 gsi_replace (gsi, stmt, false);
4075 *handled_ops_p = true;
4077 switch (gimple_code (stmt))
4079 case GIMPLE_OMP_PARALLEL:
4080 taskreg_nesting_level++;
4081 scan_omp_parallel (gsi, ctx);
4082 taskreg_nesting_level--;
4083 break;
4085 case GIMPLE_OMP_TASK:
4086 taskreg_nesting_level++;
4087 scan_omp_task (gsi, ctx);
4088 taskreg_nesting_level--;
4089 break;
4091 case GIMPLE_OMP_FOR:
4092 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4093 == GF_OMP_FOR_KIND_SIMD)
4094 && gimple_omp_for_combined_into_p (stmt)
4095 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
4097 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
4098 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
4099 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
4101 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
4102 break;
4105 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4106 == GF_OMP_FOR_KIND_SIMD)
4107 && omp_maybe_offloaded_ctx (ctx)
4108 && omp_max_simt_vf ()
4109 && gimple_omp_for_collapse (stmt) == 1)
4110 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
4111 else
4112 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
4113 break;
4115 case GIMPLE_OMP_SCOPE:
4116 ctx = new_omp_context (stmt, ctx);
4117 scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
4118 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4119 break;
4121 case GIMPLE_OMP_SECTIONS:
4122 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
4123 break;
4125 case GIMPLE_OMP_SINGLE:
4126 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
4127 break;
4129 case GIMPLE_OMP_SCAN:
4130 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
4132 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
4133 ctx->scan_inclusive = true;
4134 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
4135 ctx->scan_exclusive = true;
4137 /* FALLTHRU */
4138 case GIMPLE_OMP_SECTION:
4139 case GIMPLE_OMP_MASTER:
4140 case GIMPLE_OMP_ORDERED:
4141 case GIMPLE_OMP_CRITICAL:
4142 ctx = new_omp_context (stmt, ctx);
4143 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4144 break;
4146 case GIMPLE_OMP_MASKED:
4147 ctx = new_omp_context (stmt, ctx);
4148 scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
4149 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4150 break;
4152 case GIMPLE_OMP_TASKGROUP:
4153 ctx = new_omp_context (stmt, ctx);
4154 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
4155 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4156 break;
4158 case GIMPLE_OMP_TARGET:
4159 if (is_gimple_omp_offloaded (stmt))
4161 taskreg_nesting_level++;
4162 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4163 taskreg_nesting_level--;
4165 else
4166 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4167 break;
4169 case GIMPLE_OMP_TEAMS:
4170 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4172 taskreg_nesting_level++;
4173 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4174 taskreg_nesting_level--;
4176 else
4177 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4178 break;
4180 case GIMPLE_BIND:
4182 tree var;
4184 *handled_ops_p = false;
4185 if (ctx)
4186 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4187 var ;
4188 var = DECL_CHAIN (var))
4189 insert_decl_map (&ctx->cb, var, var);
4191 break;
4192 default:
4193 *handled_ops_p = false;
4194 break;
4197 return NULL_TREE;
4201 /* Scan all the statements starting at the current statement. CTX
4202 contains context information about the OMP directives and
4203 clauses found during the scan. */
4205 static void
4206 scan_omp (gimple_seq *body_p, omp_context *ctx)
4208 location_t saved_location;
4209 struct walk_stmt_info wi;
4211 memset (&wi, 0, sizeof (wi));
4212 wi.info = ctx;
4213 wi.want_locations = true;
4215 saved_location = input_location;
4216 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4217 input_location = saved_location;
4220 /* Re-gimplification and code generation routines. */
4222 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4223 of BIND if in a method. */
4225 static void
4226 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4228 if (DECL_ARGUMENTS (current_function_decl)
4229 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4230 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4231 == POINTER_TYPE))
4233 tree vars = gimple_bind_vars (bind);
4234 for (tree *pvar = &vars; *pvar; )
4235 if (omp_member_access_dummy_var (*pvar))
4236 *pvar = DECL_CHAIN (*pvar);
4237 else
4238 pvar = &DECL_CHAIN (*pvar);
4239 gimple_bind_set_vars (bind, vars);
4243 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4244 block and its subblocks. */
4246 static void
4247 remove_member_access_dummy_vars (tree block)
4249 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4250 if (omp_member_access_dummy_var (*pvar))
4251 *pvar = DECL_CHAIN (*pvar);
4252 else
4253 pvar = &DECL_CHAIN (*pvar);
4255 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4256 remove_member_access_dummy_vars (block);
4259 /* If a context was created for STMT when it was scanned, return it. */
4261 static omp_context *
4262 maybe_lookup_ctx (gimple *stmt)
4264 splay_tree_node n;
4265 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4266 return n ? (omp_context *) n->value : NULL;
4270 /* Find the mapping for DECL in CTX or the immediately enclosing
4271 context that has a mapping for DECL.
4273 If CTX is a nested parallel directive, we may have to use the decl
4274 mappings created in CTX's parent context. Suppose that we have the
4275 following parallel nesting (variable UIDs shown for clarity):
4277 iD.1562 = 0;
4278 #omp parallel shared(iD.1562) -> outer parallel
4279 iD.1562 = iD.1562 + 1;
4281 #omp parallel shared (iD.1562) -> inner parallel
4282 iD.1562 = iD.1562 - 1;
4284 Each parallel structure will create a distinct .omp_data_s structure
4285 for copying iD.1562 in/out of the directive:
4287 outer parallel .omp_data_s.1.i -> iD.1562
4288 inner parallel .omp_data_s.2.i -> iD.1562
4290 A shared variable mapping will produce a copy-out operation before
4291 the parallel directive and a copy-in operation after it. So, in
4292 this case we would have:
4294 iD.1562 = 0;
4295 .omp_data_o.1.i = iD.1562;
4296 #omp parallel shared(iD.1562) -> outer parallel
4297 .omp_data_i.1 = &.omp_data_o.1
4298 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4300 .omp_data_o.2.i = iD.1562; -> **
4301 #omp parallel shared(iD.1562) -> inner parallel
4302 .omp_data_i.2 = &.omp_data_o.2
4303 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4306 ** This is a problem. The symbol iD.1562 cannot be referenced
4307 inside the body of the outer parallel region. But since we are
4308 emitting this copy operation while expanding the inner parallel
4309 directive, we need to access the CTX structure of the outer
4310 parallel directive to get the correct mapping:
4312 .omp_data_o.2.i = .omp_data_i.1->i
4314 Since there may be other workshare or parallel directives enclosing
4315 the parallel directive, it may be necessary to walk up the context
4316 parent chain. This is not a problem in general because nested
4317 parallelism happens only rarely. */
4319 static tree
4320 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4322 tree t;
4323 omp_context *up;
4325 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4326 t = maybe_lookup_decl (decl, up);
4328 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4330 return t ? t : decl;
4334 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4335 in outer contexts. */
4337 static tree
4338 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4340 tree t = NULL;
4341 omp_context *up;
4343 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4344 t = maybe_lookup_decl (decl, up);
4346 return t ? t : decl;
4350 /* Construct the initialization value for reduction operation OP. */
4352 tree
4353 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4355 switch (op)
4357 case PLUS_EXPR:
4358 case MINUS_EXPR:
4359 case BIT_IOR_EXPR:
4360 case BIT_XOR_EXPR:
4361 case TRUTH_OR_EXPR:
4362 case TRUTH_ORIF_EXPR:
4363 case TRUTH_XOR_EXPR:
4364 case NE_EXPR:
4365 return build_zero_cst (type);
4367 case MULT_EXPR:
4368 case TRUTH_AND_EXPR:
4369 case TRUTH_ANDIF_EXPR:
4370 case EQ_EXPR:
4371 return fold_convert_loc (loc, type, integer_one_node);
4373 case BIT_AND_EXPR:
4374 return fold_convert_loc (loc, type, integer_minus_one_node);
4376 case MAX_EXPR:
4377 if (SCALAR_FLOAT_TYPE_P (type))
4379 REAL_VALUE_TYPE max, min;
4380 if (HONOR_INFINITIES (type))
4382 real_inf (&max);
4383 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4385 else
4386 real_maxval (&min, 1, TYPE_MODE (type));
4387 return build_real (type, min);
4389 else if (POINTER_TYPE_P (type))
4391 wide_int min
4392 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4393 return wide_int_to_tree (type, min);
4395 else
4397 gcc_assert (INTEGRAL_TYPE_P (type));
4398 return TYPE_MIN_VALUE (type);
4401 case MIN_EXPR:
4402 if (SCALAR_FLOAT_TYPE_P (type))
4404 REAL_VALUE_TYPE max;
4405 if (HONOR_INFINITIES (type))
4406 real_inf (&max);
4407 else
4408 real_maxval (&max, 0, TYPE_MODE (type));
4409 return build_real (type, max);
4411 else if (POINTER_TYPE_P (type))
4413 wide_int max
4414 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4415 return wide_int_to_tree (type, max);
4417 else
4419 gcc_assert (INTEGRAL_TYPE_P (type));
4420 return TYPE_MAX_VALUE (type);
4423 default:
4424 gcc_unreachable ();
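/* For reference, the identities produced above for an int reduction:
   +, -, |, ^, ||, != yield 0; *, &&, == yield 1; & yields -1 (all bits
   set); max yields INT_MIN and min yields INT_MAX. For floating-point
   max/min, an infinity is used when the mode honors infinities,
   otherwise the most negative/positive finite value of the mode. */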
4428 /* Construct the initialization value for reduction CLAUSE. */
4430 tree
4431 omp_reduction_init (tree clause, tree type)
4433 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4434 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4437 /* Return alignment to be assumed for var in CLAUSE, which should be
4438 OMP_CLAUSE_ALIGNED. */
4440 static tree
4441 omp_clause_aligned_alignment (tree clause)
4443 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4444 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4446 /* Otherwise return the implementation-defined alignment. */
4447 unsigned int al = 1;
4448 opt_scalar_mode mode_iter;
4449 auto_vector_modes modes;
4450 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4451 static enum mode_class classes[]
4452 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4453 for (int i = 0; i < 4; i += 2)
4454 /* The for loop above dictates that we only walk through scalar classes. */
4455 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4457 scalar_mode mode = mode_iter.require ();
4458 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4459 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4460 continue;
4461 machine_mode alt_vmode;
4462 for (unsigned int j = 0; j < modes.length (); ++j)
4463 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4464 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4465 vmode = alt_vmode;
4467 tree type = lang_hooks.types.type_for_mode (mode, 1);
4468 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4469 continue;
4470 type = build_vector_type_for_mode (type, vmode);
4471 if (TYPE_MODE (type) != vmode)
4472 continue;
4473 if (TYPE_ALIGN_UNIT (type) > al)
4474 al = TYPE_ALIGN_UNIT (type);
4476 return build_int_cst (integer_type_node, al);
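/* A hypothetical example of the fallback computation above: on a
   target whose preferred/widest SIMD modes for SImode and SFmode are
   512 bits wide, the loop would settle on al = 64, so an aligned(p)
   clause without an explicit alignment would assume 64-byte
   alignment. */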
4480 /* This structure is part of the interface between lower_rec_simd_input_clauses
4481 and lower_rec_input_clauses. */
4483 class omplow_simd_context {
4484 public:
4485 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4486 tree idx;
4487 tree lane;
4488 tree lastlane;
4489 vec<tree, va_heap> simt_eargs;
4490 gimple_seq simt_dlist;
4491 poly_uint64_pod max_vf;
4492 bool is_simt;
4495 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4496 privatization. */
4498 static bool
4499 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4500 omplow_simd_context *sctx, tree &ivar,
4501 tree &lvar, tree *rvar = NULL,
4502 tree *rvar2 = NULL)
4504 if (known_eq (sctx->max_vf, 0U))
4506 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4507 if (maybe_gt (sctx->max_vf, 1U))
4509 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4510 OMP_CLAUSE_SAFELEN);
4511 if (c)
4513 poly_uint64 safe_len;
4514 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4515 || maybe_lt (safe_len, 1U))
4516 sctx->max_vf = 1;
4517 else
4518 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4521 if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
4523 for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
4524 c = OMP_CLAUSE_CHAIN (c))
4526 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4527 continue;
4529 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4531 /* UDR reductions are not supported yet for SIMT; disable
4532 SIMT. */
4533 sctx->max_vf = 1;
4534 break;
4537 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
4538 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
4540 /* Doing boolean operations on non-integral types is
4541 for conformance only; it's not worth supporting this
4542 for SIMT. */
4543 sctx->max_vf = 1;
4544 break;
4548 if (maybe_gt (sctx->max_vf, 1U))
4550 sctx->idx = create_tmp_var (unsigned_type_node);
4551 sctx->lane = create_tmp_var (unsigned_type_node);
4554 if (known_eq (sctx->max_vf, 1U))
4555 return false;
4557 if (sctx->is_simt)
4559 if (is_gimple_reg (new_var))
4561 ivar = lvar = new_var;
4562 return true;
4564 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4565 ivar = lvar = create_tmp_var (type);
4566 TREE_ADDRESSABLE (ivar) = 1;
4567 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4568 NULL, DECL_ATTRIBUTES (ivar));
4569 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4570 tree clobber = build_clobber (type);
4571 gimple *g = gimple_build_assign (ivar, clobber);
4572 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4574 else
4576 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4577 tree avar = create_tmp_var_raw (atype);
4578 if (TREE_ADDRESSABLE (new_var))
4579 TREE_ADDRESSABLE (avar) = 1;
4580 DECL_ATTRIBUTES (avar)
4581 = tree_cons (get_identifier ("omp simd array"), NULL,
4582 DECL_ATTRIBUTES (avar));
4583 gimple_add_tmp_var (avar);
4584 tree iavar = avar;
4585 if (rvar && !ctx->for_simd_scan_phase)
4587 /* For inscan reductions, create another array temporary,
4588 which will hold the reduced value. */
4589 iavar = create_tmp_var_raw (atype);
4590 if (TREE_ADDRESSABLE (new_var))
4591 TREE_ADDRESSABLE (iavar) = 1;
4592 DECL_ATTRIBUTES (iavar)
4593 = tree_cons (get_identifier ("omp simd array"), NULL,
4594 tree_cons (get_identifier ("omp simd inscan"), NULL,
4595 DECL_ATTRIBUTES (iavar)));
4596 gimple_add_tmp_var (iavar);
4597 ctx->cb.decl_map->put (avar, iavar);
4598 if (sctx->lastlane == NULL_TREE)
4599 sctx->lastlane = create_tmp_var (unsigned_type_node);
4600 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4601 sctx->lastlane, NULL_TREE, NULL_TREE);
4602 TREE_THIS_NOTRAP (*rvar) = 1;
4604 if (ctx->scan_exclusive)
4606 /* And for exclusive scan yet another one, which will
4607 hold the value during the scan phase. */
4608 tree savar = create_tmp_var_raw (atype);
4609 if (TREE_ADDRESSABLE (new_var))
4610 TREE_ADDRESSABLE (savar) = 1;
4611 DECL_ATTRIBUTES (savar)
4612 = tree_cons (get_identifier ("omp simd array"), NULL,
4613 tree_cons (get_identifier ("omp simd inscan "
4614 "exclusive"), NULL,
4615 DECL_ATTRIBUTES (savar)));
4616 gimple_add_tmp_var (savar);
4617 ctx->cb.decl_map->put (iavar, savar);
4618 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4619 sctx->idx, NULL_TREE, NULL_TREE);
4620 TREE_THIS_NOTRAP (*rvar2) = 1;
4623 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4624 NULL_TREE, NULL_TREE);
4625 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4626 NULL_TREE, NULL_TREE);
4627 TREE_THIS_NOTRAP (ivar) = 1;
4628 TREE_THIS_NOTRAP (lvar) = 1;
4630 if (DECL_P (new_var))
4632 SET_DECL_VALUE_EXPR (new_var, lvar);
4633 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4635 return true;
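/* Illustrative effect (a sketch with hypothetical names): for a
   non-SIMT simd loop with private(x) and a max_vf of 8, X is backed by
   an "omp simd array"
     type x_arr[8];
   and uses of X become x_arr[lane]/x_arr[idx] via DECL_VALUE_EXPR, so
   the vectorizer can later privatize one element per SIMD lane. */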
4638 /* Helper function of lower_rec_input_clauses. For a reference
4639 in simd reduction, add an underlying variable it will reference. */
4641 static void
4642 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4644 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4645 if (TREE_CONSTANT (z))
4647 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4648 get_name (new_vard));
4649 gimple_add_tmp_var (z);
4650 TREE_ADDRESSABLE (z) = 1;
4651 z = build_fold_addr_expr_loc (loc, z);
4652 gimplify_assign (new_vard, z, ilist);
4656 /* Helper function for lower_rec_input_clauses. Emit into the ILIST
4657 sequence code that computes (type) (tskred_temp[idx]). */
4659 static tree
4660 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4661 unsigned idx)
4663 unsigned HOST_WIDE_INT sz
4664 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4665 tree r = build2 (MEM_REF, pointer_sized_int_node,
4666 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4667 idx * sz));
4668 tree v = create_tmp_var (pointer_sized_int_node);
4669 gimple *g = gimple_build_assign (v, r);
4670 gimple_seq_add_stmt (ilist, g);
4671 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4673 v = create_tmp_var (type);
4674 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4675 gimple_seq_add_stmt (ilist, g);
4677 return v;
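/* E.g. task_reduction_read (ilist, tskred_temp, sizetype, 1) emits
   roughly (with hypothetical temporaries)
     v1 = MEM <__intptr_t> [tskred_temp + 1 * sizeof (__intptr_t)];
     v2 = (sizetype) v1;
   and returns v2 (or just v1 when no conversion is needed). */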
4680 /* Lower early initialization of privatized variable NEW_VAR
4681 if it needs an allocator (i.e. has an allocate clause). */
4683 static bool
4684 lower_private_allocate (tree var, tree new_var, tree &allocator,
4685 tree &allocate_ptr, gimple_seq *ilist,
4686 omp_context *ctx, bool is_ref, tree size)
4688 if (allocator)
4689 return false;
4690 gcc_assert (allocate_ptr == NULL_TREE);
4691 if (ctx->allocate_map
4692 && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
4693 if (tree *allocatorp = ctx->allocate_map->get (var))
4694 allocator = *allocatorp;
4695 if (allocator == NULL_TREE)
4696 return false;
4697 if (!is_ref && omp_is_reference (var))
4699 allocator = NULL_TREE;
4700 return false;
4703 if (TREE_CODE (allocator) != INTEGER_CST)
4704 allocator = build_outer_var_ref (allocator, ctx);
4705 allocator = fold_convert (pointer_sized_int_node, allocator);
4706 if (TREE_CODE (allocator) != INTEGER_CST)
4708 tree var = create_tmp_var (TREE_TYPE (allocator));
4709 gimplify_assign (var, allocator, ilist);
4710 allocator = var;
4713 tree ptr_type, align, sz = size;
4714 if (TYPE_P (new_var))
4716 ptr_type = build_pointer_type (new_var);
4717 align = build_int_cst (size_type_node, TYPE_ALIGN_UNIT (new_var));
4719 else if (is_ref)
4721 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4722 align = build_int_cst (size_type_node,
4723 TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4725 else
4727 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4728 align = build_int_cst (size_type_node, DECL_ALIGN_UNIT (new_var));
4729 if (sz == NULL_TREE)
4730 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
4732 if (TREE_CODE (sz) != INTEGER_CST)
4734 tree szvar = create_tmp_var (size_type_node);
4735 gimplify_assign (szvar, sz, ilist);
4736 sz = szvar;
4738 allocate_ptr = create_tmp_var (ptr_type);
4739 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4740 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4741 gimple_call_set_lhs (g, allocate_ptr);
4742 gimple_seq_add_stmt (ilist, g);
4743 if (!is_ref)
4745 tree x = build_simple_mem_ref (allocate_ptr);
4746 TREE_THIS_NOTRAP (x) = 1;
4747 SET_DECL_VALUE_EXPR (new_var, x);
4748 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4750 return true;
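/* A sketch of what the above emits for a non-reference scalar with an
   allocate clause (hypothetical names):
     allocator.N = (__intptr_t) <allocator expr>;
     allocate_ptr.N = __builtin_GOMP_alloc (align, size, allocator.N);
   after which NEW_VAR's DECL_VALUE_EXPR is *allocate_ptr.N; callers
   are responsible for the matching __builtin_GOMP_free. */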
4753 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4754 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4755 private variables. Initialization statements go in ILIST, while calls
4756 to destructors go in DLIST. */
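/* For orientation, a minimal sketch (hypothetical names) of the output
   for "#pragma omp parallel firstprivate (x)": ILIST in the child
   function receives something like
     x.priv = .omp_data_i->x;
   while a privatized C++ object with a destructor would also have the
   destructor call appended to DLIST. */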
4758 static void
4759 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4760 omp_context *ctx, struct omp_for_data *fd)
4762 tree c, copyin_seq, x, ptr;
4763 bool copyin_by_ref = false;
4764 bool lastprivate_firstprivate = false;
4765 bool reduction_omp_orig_ref = false;
4766 int pass;
4767 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4768 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4769 omplow_simd_context sctx = omplow_simd_context ();
4770 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4771 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4772 gimple_seq llist[4] = { };
4773 tree nonconst_simd_if = NULL_TREE;
4775 copyin_seq = NULL;
4776 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4778 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4779 with data sharing clauses referencing variable sized vars. That
4780 is unnecessarily hard to support and very unlikely to result in
4781 vectorized code anyway. */
4782 if (is_simd)
4783 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4784 switch (OMP_CLAUSE_CODE (c))
4786 case OMP_CLAUSE_LINEAR:
4787 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4788 sctx.max_vf = 1;
4789 /* FALLTHRU */
4790 case OMP_CLAUSE_PRIVATE:
4791 case OMP_CLAUSE_FIRSTPRIVATE:
4792 case OMP_CLAUSE_LASTPRIVATE:
4793 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4794 sctx.max_vf = 1;
4795 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4797 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4798 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4799 sctx.max_vf = 1;
4801 break;
4802 case OMP_CLAUSE_REDUCTION:
4803 case OMP_CLAUSE_IN_REDUCTION:
4804 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4805 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4806 sctx.max_vf = 1;
4807 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4809 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4810 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4811 sctx.max_vf = 1;
4813 break;
4814 case OMP_CLAUSE_IF:
4815 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4816 sctx.max_vf = 1;
4817 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4818 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4819 break;
4820 case OMP_CLAUSE_SIMDLEN:
4821 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4822 sctx.max_vf = 1;
4823 break;
4824 case OMP_CLAUSE__CONDTEMP_:
4825 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4826 if (sctx.is_simt)
4827 sctx.max_vf = 1;
4828 break;
4829 default:
4830 continue;
4833 /* Add a placeholder for simduid. */
4834 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4835 sctx.simt_eargs.safe_push (NULL_TREE);
4837 unsigned task_reduction_cnt = 0;
4838 unsigned task_reduction_cntorig = 0;
4839 unsigned task_reduction_cnt_full = 0;
4840 unsigned task_reduction_cntorig_full = 0;
4841 unsigned task_reduction_other_cnt = 0;
4842 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4843 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4844 /* Do all the fixed sized types in the first pass, and the variable sized
4845 types in the second pass. This makes sure that the scalar arguments to
4846 the variable sized types are processed before we use them in the
4847 variable sized operations. For task reductions we use 4 passes: in the
4848 first two we ignore them, in the third we gather arguments for the
4849 GOMP_task_reduction_remap call, and in the last pass we actually handle
4850 the task reductions. */
4851 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4852 ? 4 : 2); ++pass)
4854 if (pass == 2 && task_reduction_cnt)
4856 tskred_atype
4857 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4858 + task_reduction_cntorig);
4859 tskred_avar = create_tmp_var_raw (tskred_atype);
4860 gimple_add_tmp_var (tskred_avar);
4861 TREE_ADDRESSABLE (tskred_avar) = 1;
4862 task_reduction_cnt_full = task_reduction_cnt;
4863 task_reduction_cntorig_full = task_reduction_cntorig;
4865 else if (pass == 3 && task_reduction_cnt)
4867 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4868 gimple *g
4869 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4870 size_int (task_reduction_cntorig),
4871 build_fold_addr_expr (tskred_avar));
4872 gimple_seq_add_stmt (ilist, g);
4874 if (pass == 3 && task_reduction_other_cnt)
4876 /* For reduction clauses, build
4877 tskred_base = (void *) tskred_temp[2]
4878 + omp_get_thread_num () * tskred_temp[1]
4879 or, if tskred_temp[1] is known to be constant, that constant
4880 directly. This is the start of the private reduction copy block
4881 for the current thread. */
4882 tree v = create_tmp_var (integer_type_node);
4883 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4884 gimple *g = gimple_build_call (x, 0);
4885 gimple_call_set_lhs (g, v);
4886 gimple_seq_add_stmt (ilist, g);
4887 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4888 tskred_temp = OMP_CLAUSE_DECL (c);
4889 if (is_taskreg_ctx (ctx))
4890 tskred_temp = lookup_decl (tskred_temp, ctx);
4891 tree v2 = create_tmp_var (sizetype);
4892 g = gimple_build_assign (v2, NOP_EXPR, v);
4893 gimple_seq_add_stmt (ilist, g);
4894 if (ctx->task_reductions[0])
4895 v = fold_convert (sizetype, ctx->task_reductions[0]);
4896 else
4897 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4898 tree v3 = create_tmp_var (sizetype);
4899 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4900 gimple_seq_add_stmt (ilist, g);
4901 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4902 tskred_base = create_tmp_var (ptr_type_node);
4903 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4904 gimple_seq_add_stmt (ilist, g);
4906 task_reduction_cnt = 0;
4907 task_reduction_cntorig = 0;
4908 task_reduction_other_cnt = 0;
4909 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4911 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4912 tree var, new_var;
4913 bool by_ref;
4914 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4915 bool task_reduction_p = false;
4916 bool task_reduction_needs_orig_p = false;
4917 tree cond = NULL_TREE;
4918 tree allocator, allocate_ptr;
4920 switch (c_kind)
4922 case OMP_CLAUSE_PRIVATE:
4923 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4924 continue;
4925 break;
4926 case OMP_CLAUSE_SHARED:
4927 /* Ignore shared directives in teams construct inside
4928 of target construct. */
4929 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4930 && !is_host_teams_ctx (ctx))
4931 continue;
4932 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4934 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4935 || is_global_var (OMP_CLAUSE_DECL (c)));
4936 continue;
4938 case OMP_CLAUSE_FIRSTPRIVATE:
4939 case OMP_CLAUSE_COPYIN:
4940 break;
4941 case OMP_CLAUSE_LINEAR:
4942 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4943 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4944 lastprivate_firstprivate = true;
4945 break;
4946 case OMP_CLAUSE_REDUCTION:
4947 case OMP_CLAUSE_IN_REDUCTION:
4948 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
4949 || is_task_ctx (ctx)
4950 || OMP_CLAUSE_REDUCTION_TASK (c))
4952 task_reduction_p = true;
4953 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4955 task_reduction_other_cnt++;
4956 if (pass == 2)
4957 continue;
4959 else
4960 task_reduction_cnt++;
4961 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4963 var = OMP_CLAUSE_DECL (c);
4964 /* If var is a global variable that isn't privatized
4965 in outer contexts, we don't need to look up the
4966 original address, it is always the address of the
4967 global variable itself. */
4968 if (!DECL_P (var)
4969 || omp_is_reference (var)
4970 || !is_global_var
4971 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4973 task_reduction_needs_orig_p = true;
4974 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4975 task_reduction_cntorig++;
4979 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4980 reduction_omp_orig_ref = true;
4981 break;
4982 case OMP_CLAUSE__REDUCTEMP_:
4983 if (!is_taskreg_ctx (ctx))
4984 continue;
4985 /* FALLTHRU */
4986 case OMP_CLAUSE__LOOPTEMP_:
4987 /* Handle _looptemp_/_reductemp_ clauses only on
4988 parallel/task. */
4989 if (fd)
4990 continue;
4991 break;
4992 case OMP_CLAUSE_LASTPRIVATE:
4993 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4995 lastprivate_firstprivate = true;
4996 if (pass != 0 || is_taskloop_ctx (ctx))
4997 continue;
4999 /* Even without a corresponding firstprivate, if the
5000 decl is a Fortran allocatable, it needs an outer var
5001 reference. */
5002 else if (pass == 0
5003 && lang_hooks.decls.omp_private_outer_ref
5004 (OMP_CLAUSE_DECL (c)))
5005 lastprivate_firstprivate = true;
5006 break;
5007 case OMP_CLAUSE_ALIGNED:
5008 if (pass != 1)
5009 continue;
5010 var = OMP_CLAUSE_DECL (c);
5011 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5012 && !is_global_var (var))
5014 new_var = maybe_lookup_decl (var, ctx);
5015 if (new_var == NULL_TREE)
5016 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
5017 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5018 tree alarg = omp_clause_aligned_alignment (c);
5019 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5020 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5021 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5022 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5023 gimplify_and_add (x, ilist);
5025 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5026 && is_global_var (var))
5028 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5029 new_var = lookup_decl (var, ctx);
5030 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
5031 t = build_fold_addr_expr_loc (clause_loc, t);
5032 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5033 tree alarg = omp_clause_aligned_alignment (c);
5034 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5035 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5036 t = fold_convert_loc (clause_loc, ptype, t);
5037 x = create_tmp_var (ptype);
5038 t = build2 (MODIFY_EXPR, ptype, x, t);
5039 gimplify_and_add (t, ilist);
5040 t = build_simple_mem_ref_loc (clause_loc, x);
5041 SET_DECL_VALUE_EXPR (new_var, t);
5042 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5044 continue;
5045 case OMP_CLAUSE__CONDTEMP_:
5046 if (is_parallel_ctx (ctx)
5047 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5048 break;
5049 continue;
5050 default:
5051 continue;
5054 if (task_reduction_p != (pass >= 2))
5055 continue;
5057 allocator = NULL_TREE;
5058 allocate_ptr = NULL_TREE;
5059 new_var = var = OMP_CLAUSE_DECL (c);
5060 if ((c_kind == OMP_CLAUSE_REDUCTION
5061 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5062 && TREE_CODE (var) == MEM_REF)
5064 var = TREE_OPERAND (var, 0);
5065 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5066 var = TREE_OPERAND (var, 0);
5067 if (TREE_CODE (var) == INDIRECT_REF
5068 || TREE_CODE (var) == ADDR_EXPR)
5069 var = TREE_OPERAND (var, 0);
5070 if (is_variable_sized (var))
5072 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5073 var = DECL_VALUE_EXPR (var);
5074 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5075 var = TREE_OPERAND (var, 0);
5076 gcc_assert (DECL_P (var));
5078 new_var = var;
5080 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5082 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5083 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5085 else if (c_kind != OMP_CLAUSE_COPYIN)
5086 new_var = lookup_decl (var, ctx);
5088 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5090 if (pass != 0)
5091 continue;
5093 /* C/C++ array section reductions. */
5094 else if ((c_kind == OMP_CLAUSE_REDUCTION
5095 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5096 && var != OMP_CLAUSE_DECL (c))
5098 if (pass == 0)
5099 continue;
5101 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5102 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5104 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5106 tree b = TREE_OPERAND (orig_var, 1);
5107 if (is_omp_target (ctx->stmt))
5108 b = NULL_TREE;
5109 else
5110 b = maybe_lookup_decl (b, ctx);
5111 if (b == NULL)
5113 b = TREE_OPERAND (orig_var, 1);
5114 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5116 if (integer_zerop (bias))
5117 bias = b;
5118 else
5120 bias = fold_convert_loc (clause_loc,
5121 TREE_TYPE (b), bias);
5122 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5123 TREE_TYPE (b), b, bias);
5125 orig_var = TREE_OPERAND (orig_var, 0);
5127 if (pass == 2)
5129 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5130 if (is_global_var (out)
5131 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5132 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5133 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5134 != POINTER_TYPE)))
5135 x = var;
5136 else if (is_omp_target (ctx->stmt))
5137 x = out;
5138 else
5140 bool by_ref = use_pointer_for_field (var, NULL);
5141 x = build_receiver_ref (var, by_ref, ctx);
5142 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5143 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5144 == POINTER_TYPE))
5145 x = build_fold_addr_expr (x);
5147 if (TREE_CODE (orig_var) == INDIRECT_REF)
5148 x = build_simple_mem_ref (x);
5149 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5151 if (var == TREE_OPERAND (orig_var, 0))
5152 x = build_fold_addr_expr (x);
5154 bias = fold_convert (sizetype, bias);
5155 x = fold_convert (ptr_type_node, x);
5156 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5157 TREE_TYPE (x), x, bias);
5158 unsigned cnt = task_reduction_cnt - 1;
5159 if (!task_reduction_needs_orig_p)
5160 cnt += (task_reduction_cntorig_full
5161 - task_reduction_cntorig);
5162 else
5163 cnt = task_reduction_cntorig - 1;
5164 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5165 size_int (cnt), NULL_TREE, NULL_TREE);
5166 gimplify_assign (r, x, ilist);
5167 continue;
5170 if (TREE_CODE (orig_var) == INDIRECT_REF
5171 || TREE_CODE (orig_var) == ADDR_EXPR)
5172 orig_var = TREE_OPERAND (orig_var, 0);
5173 tree d = OMP_CLAUSE_DECL (c);
5174 tree type = TREE_TYPE (d);
5175 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5176 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5177 tree sz = v;
5178 const char *name = get_name (orig_var);
5179 if (pass != 3 && !TREE_CONSTANT (v))
5181 tree t;
5182 if (is_omp_target (ctx->stmt))
5183 t = NULL_TREE;
5184 else
5185 t = maybe_lookup_decl (v, ctx);
5186 if (t)
5187 v = t;
5188 else
5189 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5190 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5191 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5192 TREE_TYPE (v), v,
5193 build_int_cst (TREE_TYPE (v), 1));
5194 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5195 TREE_TYPE (v), t,
5196 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5198 if (pass == 3)
5200 tree xv = create_tmp_var (ptr_type_node);
5201 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5203 unsigned cnt = task_reduction_cnt - 1;
5204 if (!task_reduction_needs_orig_p)
5205 cnt += (task_reduction_cntorig_full
5206 - task_reduction_cntorig);
5207 else
5208 cnt = task_reduction_cntorig - 1;
5209 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5210 size_int (cnt), NULL_TREE, NULL_TREE);
5212 gimple *g = gimple_build_assign (xv, x);
5213 gimple_seq_add_stmt (ilist, g);
5215 else
5217 unsigned int idx = *ctx->task_reduction_map->get (c);
5218 tree off;
5219 if (ctx->task_reductions[1 + idx])
5220 off = fold_convert (sizetype,
5221 ctx->task_reductions[1 + idx]);
5222 else
5223 off = task_reduction_read (ilist, tskred_temp, sizetype,
5224 7 + 3 * idx + 1);
5225 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5226 tskred_base, off);
5227 gimple_seq_add_stmt (ilist, g);
5229 x = fold_convert (build_pointer_type (boolean_type_node),
5230 xv);
5231 if (TREE_CONSTANT (v))
5232 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5233 TYPE_SIZE_UNIT (type));
5234 else
5236 tree t;
5237 if (is_omp_target (ctx->stmt))
5238 t = NULL_TREE;
5239 else
5240 t = maybe_lookup_decl (v, ctx);
5241 if (t)
5242 v = t;
5243 else
5244 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5245 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5246 fb_rvalue);
5247 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5248 TREE_TYPE (v), v,
5249 build_int_cst (TREE_TYPE (v), 1));
5250 t = fold_build2_loc (clause_loc, MULT_EXPR,
5251 TREE_TYPE (v), t,
5252 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5253 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5255 cond = create_tmp_var (TREE_TYPE (x));
5256 gimplify_assign (cond, x, ilist);
5257 x = xv;
5259 else if (lower_private_allocate (var, type, allocator,
5260 allocate_ptr, ilist, ctx,
5261 true,
5262 TREE_CONSTANT (v)
5263 ? TYPE_SIZE_UNIT (type)
5264 : sz))
5265 x = allocate_ptr;
5266 else if (TREE_CONSTANT (v))
5268 x = create_tmp_var_raw (type, name);
5269 gimple_add_tmp_var (x);
5270 TREE_ADDRESSABLE (x) = 1;
5271 x = build_fold_addr_expr_loc (clause_loc, x);
5273 else
5275 tree atmp
5276 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5277 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5278 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5281 tree ptype = build_pointer_type (TREE_TYPE (type));
5282 x = fold_convert_loc (clause_loc, ptype, x);
5283 tree y = create_tmp_var (ptype, name);
5284 gimplify_assign (y, x, ilist);
5285 x = y;
5286 tree yb = y;
5288 if (!integer_zerop (bias))
5290 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5291 bias);
5292 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5294 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5295 pointer_sized_int_node, yb, bias);
5296 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5297 yb = create_tmp_var (ptype, name);
5298 gimplify_assign (yb, x, ilist);
5299 x = yb;
5302 d = TREE_OPERAND (d, 0);
5303 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5304 d = TREE_OPERAND (d, 0);
5305 if (TREE_CODE (d) == ADDR_EXPR)
5307 if (orig_var != var)
5309 gcc_assert (is_variable_sized (orig_var));
5310 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5312 gimplify_assign (new_var, x, ilist);
5313 tree new_orig_var = lookup_decl (orig_var, ctx);
5314 tree t = build_fold_indirect_ref (new_var);
5315 DECL_IGNORED_P (new_var) = 0;
5316 TREE_THIS_NOTRAP (t) = 1;
5317 SET_DECL_VALUE_EXPR (new_orig_var, t);
5318 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5320 else
5322 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5323 build_int_cst (ptype, 0));
5324 SET_DECL_VALUE_EXPR (new_var, x);
5325 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5328 else
5330 gcc_assert (orig_var == var);
5331 if (TREE_CODE (d) == INDIRECT_REF)
5333 x = create_tmp_var (ptype, name);
5334 TREE_ADDRESSABLE (x) = 1;
5335 gimplify_assign (x, yb, ilist);
5336 x = build_fold_addr_expr_loc (clause_loc, x);
5338 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5339 gimplify_assign (new_var, x, ilist);
5341 /* GOMP_taskgroup_reduction_register memsets the whole
5342 array to zero. If the initializer is zero, we don't
5343 need to initialize it again, just mark it as ever
5344 used unconditionally, i.e. cond = true. */
5345 if (cond
5346 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5347 && initializer_zerop (omp_reduction_init (c,
5348 TREE_TYPE (type))))
5350 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5351 boolean_true_node);
5352 gimple_seq_add_stmt (ilist, g);
5353 continue;
5355 tree end = create_artificial_label (UNKNOWN_LOCATION);
5356 if (cond)
5358 gimple *g;
5359 if (!is_parallel_ctx (ctx))
5361 tree condv = create_tmp_var (boolean_type_node);
5362 g = gimple_build_assign (condv,
5363 build_simple_mem_ref (cond));
5364 gimple_seq_add_stmt (ilist, g);
5365 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5366 g = gimple_build_cond (NE_EXPR, condv,
5367 boolean_false_node, end, lab1);
5368 gimple_seq_add_stmt (ilist, g);
5369 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5371 g = gimple_build_assign (build_simple_mem_ref (cond),
5372 boolean_true_node);
5373 gimple_seq_add_stmt (ilist, g);
5376 tree y1 = create_tmp_var (ptype);
5377 gimplify_assign (y1, y, ilist);
5378 tree i2 = NULL_TREE, y2 = NULL_TREE;
5379 tree body2 = NULL_TREE, end2 = NULL_TREE;
5380 tree y3 = NULL_TREE, y4 = NULL_TREE;
5381 if (task_reduction_needs_orig_p)
5383 y3 = create_tmp_var (ptype);
5384 tree ref;
5385 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5386 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5387 size_int (task_reduction_cnt_full
5388 + task_reduction_cntorig - 1),
5389 NULL_TREE, NULL_TREE);
5390 else
5392 unsigned int idx = *ctx->task_reduction_map->get (c);
5393 ref = task_reduction_read (ilist, tskred_temp, ptype,
5394 7 + 3 * idx);
5396 gimplify_assign (y3, ref, ilist);
5398 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5400 if (pass != 3)
5402 y2 = create_tmp_var (ptype);
5403 gimplify_assign (y2, y, ilist);
5405 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5407 tree ref = build_outer_var_ref (var, ctx);
5408 /* For references, build_outer_var_ref already performs this. */
5409 if (TREE_CODE (d) == INDIRECT_REF)
5410 gcc_assert (omp_is_reference (var));
5411 else if (TREE_CODE (d) == ADDR_EXPR)
5412 ref = build_fold_addr_expr (ref);
5413 else if (omp_is_reference (var))
5414 ref = build_fold_addr_expr (ref);
5415 ref = fold_convert_loc (clause_loc, ptype, ref);
5416 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5417 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5419 y3 = create_tmp_var (ptype);
5420 gimplify_assign (y3, unshare_expr (ref), ilist);
5422 if (is_simd)
5424 y4 = create_tmp_var (ptype);
5425 gimplify_assign (y4, ref, dlist);
5429 tree i = create_tmp_var (TREE_TYPE (v));
5430 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5431 tree body = create_artificial_label (UNKNOWN_LOCATION);
5432 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5433 if (y2)
5435 i2 = create_tmp_var (TREE_TYPE (v));
5436 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5437 body2 = create_artificial_label (UNKNOWN_LOCATION);
5438 end2 = create_artificial_label (UNKNOWN_LOCATION);
5439 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5441 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5443 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5444 tree decl_placeholder
5445 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5446 SET_DECL_VALUE_EXPR (decl_placeholder,
5447 build_simple_mem_ref (y1));
5448 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5449 SET_DECL_VALUE_EXPR (placeholder,
5450 y3 ? build_simple_mem_ref (y3)
5451 : error_mark_node);
5452 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5453 x = lang_hooks.decls.omp_clause_default_ctor
5454 (c, build_simple_mem_ref (y1),
5455 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5456 if (x)
5457 gimplify_and_add (x, ilist);
5458 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5460 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5461 lower_omp (&tseq, ctx);
5462 gimple_seq_add_seq (ilist, tseq);
5464 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5465 if (is_simd)
5467 SET_DECL_VALUE_EXPR (decl_placeholder,
5468 build_simple_mem_ref (y2));
5469 SET_DECL_VALUE_EXPR (placeholder,
5470 build_simple_mem_ref (y4));
5471 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5472 lower_omp (&tseq, ctx);
5473 gimple_seq_add_seq (dlist, tseq);
5474 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5476 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5477 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5478 if (y2)
5480 x = lang_hooks.decls.omp_clause_dtor
5481 (c, build_simple_mem_ref (y2));
5482 if (x)
5483 gimplify_and_add (x, dlist);
5486 else
5488 x = omp_reduction_init (c, TREE_TYPE (type));
5489 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5491 /* reduction(-:var) sums up the partial results, so it
5492 acts identically to reduction(+:var). */
5493 if (code == MINUS_EXPR)
5494 code = PLUS_EXPR;
5496 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5497 if (is_simd)
5499 x = build2 (code, TREE_TYPE (type),
5500 build_simple_mem_ref (y4),
5501 build_simple_mem_ref (y2));
5502 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5505 gimple *g
5506 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5507 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5508 gimple_seq_add_stmt (ilist, g);
5509 if (y3)
5511 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5512 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5513 gimple_seq_add_stmt (ilist, g);
5515 g = gimple_build_assign (i, PLUS_EXPR, i,
5516 build_int_cst (TREE_TYPE (i), 1));
5517 gimple_seq_add_stmt (ilist, g);
5518 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5519 gimple_seq_add_stmt (ilist, g);
5520 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5521 if (y2)
5523 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5524 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5525 gimple_seq_add_stmt (dlist, g);
5526 if (y4)
5528 g = gimple_build_assign
5529 (y4, POINTER_PLUS_EXPR, y4,
5530 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5531 gimple_seq_add_stmt (dlist, g);
5533 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5534 build_int_cst (TREE_TYPE (i2), 1));
5535 gimple_seq_add_stmt (dlist, g);
5536 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5537 gimple_seq_add_stmt (dlist, g);
5538 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5540 if (allocator)
5542 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5543 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5544 gimple_seq_add_stmt (dlist, g);
5546 continue;
5548 else if (pass == 2)
5550 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5551 if (is_global_var (out))
5552 x = var;
5553 else if (is_omp_target (ctx->stmt))
5554 x = out;
5555 else
5557 bool by_ref = use_pointer_for_field (var, ctx);
5558 x = build_receiver_ref (var, by_ref, ctx);
5560 if (!omp_is_reference (var))
5561 x = build_fold_addr_expr (x);
5562 x = fold_convert (ptr_type_node, x);
5563 unsigned cnt = task_reduction_cnt - 1;
5564 if (!task_reduction_needs_orig_p)
5565 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5566 else
5567 cnt = task_reduction_cntorig - 1;
5568 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5569 size_int (cnt), NULL_TREE, NULL_TREE);
5570 gimplify_assign (r, x, ilist);
5571 continue;
5573 else if (pass == 3)
5575 tree type = TREE_TYPE (new_var);
5576 if (!omp_is_reference (var))
5577 type = build_pointer_type (type);
5578 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5580 unsigned cnt = task_reduction_cnt - 1;
5581 if (!task_reduction_needs_orig_p)
5582 cnt += (task_reduction_cntorig_full
5583 - task_reduction_cntorig);
5584 else
5585 cnt = task_reduction_cntorig - 1;
5586 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5587 size_int (cnt), NULL_TREE, NULL_TREE);
5589 else
5591 unsigned int idx = *ctx->task_reduction_map->get (c);
5592 tree off;
5593 if (ctx->task_reductions[1 + idx])
5594 off = fold_convert (sizetype,
5595 ctx->task_reductions[1 + idx]);
5596 else
5597 off = task_reduction_read (ilist, tskred_temp, sizetype,
5598 7 + 3 * idx + 1);
5599 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5600 tskred_base, off);
5602 x = fold_convert (type, x);
5603 tree t;
5604 if (omp_is_reference (var))
5606 gimplify_assign (new_var, x, ilist);
5607 t = new_var;
5608 new_var = build_simple_mem_ref (new_var);
5610 else
5612 t = create_tmp_var (type);
5613 gimplify_assign (t, x, ilist);
5614 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5615 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5617 t = fold_convert (build_pointer_type (boolean_type_node), t);
5618 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5619 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5620 cond = create_tmp_var (TREE_TYPE (t));
5621 gimplify_assign (cond, t, ilist);
5623 else if (is_variable_sized (var))
5625 /* For variable-sized types, we need to allocate the
5626 actual storage here. Call alloca and store the
5627 result in the pointer decl that we created elsewhere. */
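/* Editorial sketch (names assumed, not emitted verbatim): for a
   variable-sized private VAR accessed through the pointer decl PTR,
   the statements built below amount to
       void *tmp = __builtin_alloca_with_align (size, DECL_ALIGN (var));
       ptr = (TYPE *) tmp;
   unless lower_private_allocate satisfied the allocation from an
   allocate clause, in which case TMP is the allocator's pointer.  */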
5628 if (pass == 0)
5629 continue;
5631 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5633 tree tmp;
5635 ptr = DECL_VALUE_EXPR (new_var);
5636 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5637 ptr = TREE_OPERAND (ptr, 0);
5638 gcc_assert (DECL_P (ptr));
5639 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5641 if (lower_private_allocate (var, new_var, allocator,
5642 allocate_ptr, ilist, ctx,
5643 false, x))
5644 tmp = allocate_ptr;
5645 else
5647 /* void *tmp = __builtin_alloca_with_align (x, DECL_ALIGN (var)); */
5648 tree atmp
5649 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5650 gcall *stmt
5651 = gimple_build_call (atmp, 2, x,
5652 size_int (DECL_ALIGN (var)));
5653 cfun->calls_alloca = 1;
5654 tmp = create_tmp_var_raw (ptr_type_node);
5655 gimple_add_tmp_var (tmp);
5656 gimple_call_set_lhs (stmt, tmp);
5658 gimple_seq_add_stmt (ilist, stmt);
5661 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5662 gimplify_assign (ptr, x, ilist);
5665 else if (omp_is_reference (var)
5666 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5667 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5669 /* For references that are being privatized for Fortran,
5670 allocate new backing storage for the new pointer
5671 variable. This lets us avoid rewriting all the
5672 code that expects a pointer into code that expects
5673 a direct variable. */
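/* Editorial sketch (assumed shapes): NEW_VAR here is the pointer
   decl standing in for the privatized reference; the code below
   picks its backing storage roughly as
       new_var = &tmp;            // constant size, non-SIMD case
   or  new_var = __builtin_alloca_with_align (size, align);
   or  new_var = allocate_ptr;    // allocate clause
   and then dereferences NEW_VAR so the rest of the lowering sees
   the object itself.  */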
5674 if (pass == 0)
5675 continue;
5677 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5678 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5680 x = build_receiver_ref (var, false, ctx);
5681 if (ctx->allocate_map)
5682 if (tree *allocatep = ctx->allocate_map->get (var))
5684 allocator = *allocatep;
5685 if (TREE_CODE (allocator) != INTEGER_CST)
5686 allocator = build_outer_var_ref (allocator, ctx);
5687 allocator = fold_convert (pointer_sized_int_node,
5688 allocator);
5689 allocate_ptr = unshare_expr (x);
5691 if (allocator == NULL_TREE)
5692 x = build_fold_addr_expr_loc (clause_loc, x);
5694 else if (lower_private_allocate (var, new_var, allocator,
5695 allocate_ptr,
5696 ilist, ctx, true, x))
5697 x = allocate_ptr;
5698 else if (TREE_CONSTANT (x))
5700 /* For a reduction in a SIMD loop, defer adding the
5701 initialization of the reference, because if we decide
5702 to use a SIMD array for it, the initialization could cause
5703 an expansion ICE. Ditto for other privatization clauses. */
5704 if (is_simd)
5705 x = NULL_TREE;
5706 else
5708 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5709 get_name (var));
5710 gimple_add_tmp_var (x);
5711 TREE_ADDRESSABLE (x) = 1;
5712 x = build_fold_addr_expr_loc (clause_loc, x);
5715 else
5717 tree atmp
5718 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5719 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5720 tree al = size_int (TYPE_ALIGN (rtype));
5721 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5724 if (x)
5726 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5727 gimplify_assign (new_var, x, ilist);
5730 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5732 else if ((c_kind == OMP_CLAUSE_REDUCTION
5733 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5734 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5736 if (pass == 0)
5737 continue;
5739 else if (pass != 0)
5740 continue;
5742 switch (OMP_CLAUSE_CODE (c))
5744 case OMP_CLAUSE_SHARED:
5745 /* Ignore shared directives in teams construct inside
5746 target construct. */
5747 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5748 && !is_host_teams_ctx (ctx))
5749 continue;
5750 /* Shared global vars are just accessed directly. */
5751 if (is_global_var (new_var))
5752 break;
5753 /* For taskloop firstprivate/lastprivate, represented
5754 as firstprivate and shared clause on the task, new_var
5755 is the firstprivate var. */
5756 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5757 break;
5758 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5759 needs to be delayed until after fixup_child_record_type so
5760 that we get the correct type during the dereference. */
5761 by_ref = use_pointer_for_field (var, ctx);
5762 x = build_receiver_ref (var, by_ref, ctx);
5763 SET_DECL_VALUE_EXPR (new_var, x);
5764 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5766 /* ??? If VAR is not passed by reference, and the variable
5767 hasn't been initialized yet, then we'll get a warning for
5768 the store into the omp_data_s structure. Ideally, we'd be
5769 able to notice this and not store anything at all, but
5770 we're generating code too early. Suppress the warning. */
5771 if (!by_ref)
5772 suppress_warning (var, OPT_Wuninitialized);
5773 break;
5775 case OMP_CLAUSE__CONDTEMP_:
5776 if (is_parallel_ctx (ctx))
5778 x = build_receiver_ref (var, false, ctx);
5779 SET_DECL_VALUE_EXPR (new_var, x);
5780 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5782 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5784 x = build_zero_cst (TREE_TYPE (var));
5785 goto do_private;
5787 break;
5789 case OMP_CLAUSE_LASTPRIVATE:
5790 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5791 break;
5792 /* FALLTHRU */
5794 case OMP_CLAUSE_PRIVATE:
5795 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5796 x = build_outer_var_ref (var, ctx);
5797 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5799 if (is_task_ctx (ctx))
5800 x = build_receiver_ref (var, false, ctx);
5801 else
5802 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5804 else
5805 x = NULL;
5806 do_private:
5807 tree nx;
5808 bool copy_ctor;
5809 copy_ctor = false;
5810 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5811 ilist, ctx, false, NULL_TREE);
5812 nx = unshare_expr (new_var);
5813 if (is_simd
5814 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5815 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5816 copy_ctor = true;
5817 if (copy_ctor)
5818 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5819 else
5820 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5821 if (is_simd)
5823 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5824 if ((TREE_ADDRESSABLE (new_var) || nx || y
5825 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5826 && (gimple_omp_for_collapse (ctx->stmt) != 1
5827 || (gimple_omp_for_index (ctx->stmt, 0)
5828 != new_var)))
5829 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5830 || omp_is_reference (var))
5831 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5832 ivar, lvar))
5834 if (omp_is_reference (var))
5836 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5837 tree new_vard = TREE_OPERAND (new_var, 0);
5838 gcc_assert (DECL_P (new_vard));
5839 SET_DECL_VALUE_EXPR (new_vard,
5840 build_fold_addr_expr (lvar));
5841 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5844 if (nx)
5846 tree iv = unshare_expr (ivar);
5847 if (copy_ctor)
5848 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5850 else
5851 x = lang_hooks.decls.omp_clause_default_ctor (c,
5855 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5857 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5858 unshare_expr (ivar), x);
5859 nx = x;
5861 if (nx && x)
5862 gimplify_and_add (x, &llist[0]);
5863 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5864 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5866 tree v = new_var;
5867 if (!DECL_P (v))
5869 gcc_assert (TREE_CODE (v) == MEM_REF);
5870 v = TREE_OPERAND (v, 0);
5871 gcc_assert (DECL_P (v));
5873 v = *ctx->lastprivate_conditional_map->get (v);
5874 tree t = create_tmp_var (TREE_TYPE (v));
5875 tree z = build_zero_cst (TREE_TYPE (v));
5876 tree orig_v
5877 = build_outer_var_ref (var, ctx,
5878 OMP_CLAUSE_LASTPRIVATE);
5879 gimple_seq_add_stmt (dlist,
5880 gimple_build_assign (t, z));
5881 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5882 tree civar = DECL_VALUE_EXPR (v);
5883 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5884 civar = unshare_expr (civar);
5885 TREE_OPERAND (civar, 1) = sctx.idx;
5886 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5887 unshare_expr (civar));
5888 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5889 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5890 orig_v, unshare_expr (ivar)));
5891 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5892 civar);
5893 x = build3 (COND_EXPR, void_type_node, cond, x,
5894 void_node);
5895 gimple_seq tseq = NULL;
5896 gimplify_and_add (x, &tseq);
5897 if (ctx->outer)
5898 lower_omp (&tseq, ctx->outer);
5899 gimple_seq_add_seq (&llist[1], tseq);
5901 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5902 && ctx->for_simd_scan_phase)
5904 x = unshare_expr (ivar);
5905 tree orig_v
5906 = build_outer_var_ref (var, ctx,
5907 OMP_CLAUSE_LASTPRIVATE);
5908 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5909 orig_v);
5910 gimplify_and_add (x, &llist[0]);
5912 if (y)
5914 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5915 if (y)
5916 gimplify_and_add (y, &llist[1]);
5918 break;
5920 if (omp_is_reference (var))
5922 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5923 tree new_vard = TREE_OPERAND (new_var, 0);
5924 gcc_assert (DECL_P (new_vard));
5925 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5926 x = TYPE_SIZE_UNIT (type);
5927 if (TREE_CONSTANT (x))
5929 x = create_tmp_var_raw (type, get_name (var));
5930 gimple_add_tmp_var (x);
5931 TREE_ADDRESSABLE (x) = 1;
5932 x = build_fold_addr_expr_loc (clause_loc, x);
5933 x = fold_convert_loc (clause_loc,
5934 TREE_TYPE (new_vard), x);
5935 gimplify_assign (new_vard, x, ilist);
5939 if (nx)
5940 gimplify_and_add (nx, ilist);
5941 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5942 && is_simd
5943 && ctx->for_simd_scan_phase)
5945 tree orig_v = build_outer_var_ref (var, ctx,
5946 OMP_CLAUSE_LASTPRIVATE);
5947 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5948 orig_v);
5949 gimplify_and_add (x, ilist);
5951 /* FALLTHRU */
5953 do_dtor:
5954 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5955 if (x)
5956 gimplify_and_add (x, dlist);
5957 if (allocator)
5959 if (!is_gimple_val (allocator))
5961 tree avar = create_tmp_var (TREE_TYPE (allocator));
5962 gimplify_assign (avar, allocator, dlist);
5963 allocator = avar;
5965 if (!is_gimple_val (allocate_ptr))
5967 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
5968 gimplify_assign (apvar, allocate_ptr, dlist);
5969 allocate_ptr = apvar;
5971 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5972 gimple *g
5973 = gimple_build_call (f, 2, allocate_ptr, allocator);
5974 gimple_seq_add_stmt (dlist, g);
5976 break;
5978 case OMP_CLAUSE_LINEAR:
5979 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5980 goto do_firstprivate;
5981 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5982 x = NULL;
5983 else
5984 x = build_outer_var_ref (var, ctx);
5985 goto do_private;
5987 case OMP_CLAUSE_FIRSTPRIVATE:
5988 if (is_task_ctx (ctx))
5990 if ((omp_is_reference (var)
5991 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5992 || is_variable_sized (var))
5993 goto do_dtor;
5994 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5995 ctx))
5996 || use_pointer_for_field (var, NULL))
5998 x = build_receiver_ref (var, false, ctx);
5999 if (ctx->allocate_map)
6000 if (tree *allocatep = ctx->allocate_map->get (var))
6002 allocator = *allocatep;
6003 if (TREE_CODE (allocator) != INTEGER_CST)
6004 allocator = build_outer_var_ref (allocator, ctx);
6005 allocator = fold_convert (pointer_sized_int_node,
6006 allocator);
6007 allocate_ptr = unshare_expr (x);
6008 x = build_simple_mem_ref (x);
6009 TREE_THIS_NOTRAP (x) = 1;
6011 SET_DECL_VALUE_EXPR (new_var, x);
6012 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6013 goto do_dtor;
6016 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6017 && omp_is_reference (var))
6019 x = build_outer_var_ref (var, ctx);
6020 gcc_assert (TREE_CODE (x) == MEM_REF
6021 && integer_zerop (TREE_OPERAND (x, 1)));
6022 x = TREE_OPERAND (x, 0);
6023 x = lang_hooks.decls.omp_clause_copy_ctor
6024 (c, unshare_expr (new_var), x);
6025 gimplify_and_add (x, ilist);
6026 goto do_dtor;
6028 do_firstprivate:
6029 lower_private_allocate (var, new_var, allocator, allocate_ptr,
6030 ilist, ctx, false, NULL_TREE);
6031 x = build_outer_var_ref (var, ctx);
6032 if (is_simd)
6034 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6035 && gimple_omp_for_combined_into_p (ctx->stmt))
6037 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6038 tree stept = TREE_TYPE (t);
6039 tree ct = omp_find_clause (clauses,
6040 OMP_CLAUSE__LOOPTEMP_);
6041 gcc_assert (ct);
6042 tree l = OMP_CLAUSE_DECL (ct);
6043 tree n1 = fd->loop.n1;
6044 tree step = fd->loop.step;
6045 tree itype = TREE_TYPE (l);
6046 if (POINTER_TYPE_P (itype))
6047 itype = signed_type_for (itype);
6048 l = fold_build2 (MINUS_EXPR, itype, l, n1);
6049 if (TYPE_UNSIGNED (itype)
6050 && fd->loop.cond_code == GT_EXPR)
6051 l = fold_build2 (TRUNC_DIV_EXPR, itype,
6052 fold_build1 (NEGATE_EXPR, itype, l),
6053 fold_build1 (NEGATE_EXPR,
6054 itype, step));
6055 else
6056 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6057 t = fold_build2 (MULT_EXPR, stept,
6058 fold_convert (stept, l), t);
6060 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6062 if (omp_is_reference (var))
6064 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6065 tree new_vard = TREE_OPERAND (new_var, 0);
6066 gcc_assert (DECL_P (new_vard));
6067 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6068 nx = TYPE_SIZE_UNIT (type);
6069 if (TREE_CONSTANT (nx))
6071 nx = create_tmp_var_raw (type,
6072 get_name (var));
6073 gimple_add_tmp_var (nx);
6074 TREE_ADDRESSABLE (nx) = 1;
6075 nx = build_fold_addr_expr_loc (clause_loc,
6076 nx);
6077 nx = fold_convert_loc (clause_loc,
6078 TREE_TYPE (new_vard),
6079 nx);
6080 gimplify_assign (new_vard, nx, ilist);
6084 x = lang_hooks.decls.omp_clause_linear_ctor
6085 (c, new_var, x, t);
6086 gimplify_and_add (x, ilist);
6087 goto do_dtor;
6090 if (POINTER_TYPE_P (TREE_TYPE (x)))
6091 x = fold_build2 (POINTER_PLUS_EXPR,
6092 TREE_TYPE (x), x, t);
6093 else
6094 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
6097 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6098 || TREE_ADDRESSABLE (new_var)
6099 || omp_is_reference (var))
6100 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6101 ivar, lvar))
6103 if (omp_is_reference (var))
6105 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6106 tree new_vard = TREE_OPERAND (new_var, 0);
6107 gcc_assert (DECL_P (new_vard));
6108 SET_DECL_VALUE_EXPR (new_vard,
6109 build_fold_addr_expr (lvar));
6110 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6112 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6114 tree iv = create_tmp_var (TREE_TYPE (new_var));
6115 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6116 gimplify_and_add (x, ilist);
6117 gimple_stmt_iterator gsi
6118 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6119 gassign *g
6120 = gimple_build_assign (unshare_expr (lvar), iv);
6121 gsi_insert_before_without_update (&gsi, g,
6122 GSI_SAME_STMT);
6123 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6124 enum tree_code code = PLUS_EXPR;
6125 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6126 code = POINTER_PLUS_EXPR;
6127 g = gimple_build_assign (iv, code, iv, t);
6128 gsi_insert_before_without_update (&gsi, g,
6129 GSI_SAME_STMT);
6130 break;
6132 x = lang_hooks.decls.omp_clause_copy_ctor
6133 (c, unshare_expr (ivar), x);
6134 gimplify_and_add (x, &llist[0]);
6135 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6136 if (x)
6137 gimplify_and_add (x, &llist[1]);
6138 break;
6140 if (omp_is_reference (var))
6142 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6143 tree new_vard = TREE_OPERAND (new_var, 0);
6144 gcc_assert (DECL_P (new_vard));
6145 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6146 nx = TYPE_SIZE_UNIT (type);
6147 if (TREE_CONSTANT (nx))
6149 nx = create_tmp_var_raw (type, get_name (var));
6150 gimple_add_tmp_var (nx);
6151 TREE_ADDRESSABLE (nx) = 1;
6152 nx = build_fold_addr_expr_loc (clause_loc, nx);
6153 nx = fold_convert_loc (clause_loc,
6154 TREE_TYPE (new_vard), nx);
6155 gimplify_assign (new_vard, nx, ilist);
6159 x = lang_hooks.decls.omp_clause_copy_ctor
6160 (c, unshare_expr (new_var), x);
6161 gimplify_and_add (x, ilist);
6162 goto do_dtor;
6164 case OMP_CLAUSE__LOOPTEMP_:
6165 case OMP_CLAUSE__REDUCTEMP_:
6166 gcc_assert (is_taskreg_ctx (ctx));
6167 x = build_outer_var_ref (var, ctx);
6168 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6169 gimplify_and_add (x, ilist);
6170 break;
6172 case OMP_CLAUSE_COPYIN:
6173 by_ref = use_pointer_for_field (var, NULL);
6174 x = build_receiver_ref (var, by_ref, ctx);
6175 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6176 append_to_statement_list (x, &copyin_seq);
6177 copyin_by_ref |= by_ref;
6178 break;
6180 case OMP_CLAUSE_REDUCTION:
6181 case OMP_CLAUSE_IN_REDUCTION:
6182 /* OpenACC reductions are initialized using the
6183 GOACC_REDUCTION internal function. */
6184 if (is_gimple_omp_oacc (ctx->stmt))
6185 break;
6186 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6188 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6189 gimple *tseq;
6190 tree ptype = TREE_TYPE (placeholder);
6191 if (cond)
6193 x = error_mark_node;
6194 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6195 && !task_reduction_needs_orig_p)
6196 x = var;
6197 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6199 tree pptype = build_pointer_type (ptype);
6200 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6201 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6202 size_int (task_reduction_cnt_full
6203 + task_reduction_cntorig - 1),
6204 NULL_TREE, NULL_TREE);
6205 else
6207 unsigned int idx
6208 = *ctx->task_reduction_map->get (c);
6209 x = task_reduction_read (ilist, tskred_temp,
6210 pptype, 7 + 3 * idx);
6212 x = fold_convert (pptype, x);
6213 x = build_simple_mem_ref (x);
6216 else
6218 lower_private_allocate (var, new_var, allocator,
6219 allocate_ptr, ilist, ctx, false,
6220 NULL_TREE);
6221 x = build_outer_var_ref (var, ctx);
6223 if (omp_is_reference (var)
6224 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6225 x = build_fold_addr_expr_loc (clause_loc, x);
6227 SET_DECL_VALUE_EXPR (placeholder, x);
6228 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6229 tree new_vard = new_var;
6230 if (omp_is_reference (var))
6232 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6233 new_vard = TREE_OPERAND (new_var, 0);
6234 gcc_assert (DECL_P (new_vard));
6236 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6237 if (is_simd
6238 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6239 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6240 rvarp = &rvar;
6241 if (is_simd
6242 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6243 ivar, lvar, rvarp,
6244 &rvar2))
6246 if (new_vard == new_var)
6248 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6249 SET_DECL_VALUE_EXPR (new_var, ivar);
6251 else
6253 SET_DECL_VALUE_EXPR (new_vard,
6254 build_fold_addr_expr (ivar));
6255 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6257 x = lang_hooks.decls.omp_clause_default_ctor
6258 (c, unshare_expr (ivar),
6259 build_outer_var_ref (var, ctx));
6260 if (rvarp && ctx->for_simd_scan_phase)
6262 if (x)
6263 gimplify_and_add (x, &llist[0]);
6264 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6265 if (x)
6266 gimplify_and_add (x, &llist[1]);
6267 break;
6269 else if (rvarp)
6271 if (x)
6273 gimplify_and_add (x, &llist[0]);
6275 tree ivar2 = unshare_expr (lvar);
6276 TREE_OPERAND (ivar2, 1) = sctx.idx;
6277 x = lang_hooks.decls.omp_clause_default_ctor
6278 (c, ivar2, build_outer_var_ref (var, ctx));
6279 gimplify_and_add (x, &llist[0]);
6281 if (rvar2)
6283 x = lang_hooks.decls.omp_clause_default_ctor
6284 (c, unshare_expr (rvar2),
6285 build_outer_var_ref (var, ctx));
6286 gimplify_and_add (x, &llist[0]);
6289 /* For types that need construction, add another
6290 private var which will be default constructed
6291 and optionally initialized with
6292 OMP_CLAUSE_REDUCTION_GIMPLE_INIT; in the
6293 loop we then assign this value instead of
6294 constructing and destructing it in each
6295 iteration. */
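/* Editorial sketch: for an inscan reduction over a type with a
   user-visible constructor, instead of constructing and destructing
   the privatized copy in every iteration, one extra var NV is
   default-constructed (and GIMPLE_INIT-initialized) once up front,
   each iteration merely assigns from it, and NV is destructed once
   via DLIST.  */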
6296 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6297 gimple_add_tmp_var (nv);
6298 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6299 ? rvar2
6300 : ivar, 0),
6301 nv);
6302 x = lang_hooks.decls.omp_clause_default_ctor
6303 (c, nv, build_outer_var_ref (var, ctx));
6304 gimplify_and_add (x, ilist);
6306 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6308 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6309 x = DECL_VALUE_EXPR (new_vard);
6310 tree vexpr = nv;
6311 if (new_vard != new_var)
6312 vexpr = build_fold_addr_expr (nv);
6313 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6314 lower_omp (&tseq, ctx);
6315 SET_DECL_VALUE_EXPR (new_vard, x);
6316 gimple_seq_add_seq (ilist, tseq);
6317 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6320 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6321 if (x)
6322 gimplify_and_add (x, dlist);
6325 tree ref = build_outer_var_ref (var, ctx);
6326 x = unshare_expr (ivar);
6327 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6328 ref);
6329 gimplify_and_add (x, &llist[0]);
6331 ref = build_outer_var_ref (var, ctx);
6332 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6333 rvar);
6334 gimplify_and_add (x, &llist[3]);
6336 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6337 if (new_vard == new_var)
6338 SET_DECL_VALUE_EXPR (new_var, lvar);
6339 else
6340 SET_DECL_VALUE_EXPR (new_vard,
6341 build_fold_addr_expr (lvar));
6343 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6344 if (x)
6345 gimplify_and_add (x, &llist[1]);
6347 tree ivar2 = unshare_expr (lvar);
6348 TREE_OPERAND (ivar2, 1) = sctx.idx;
6349 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6350 if (x)
6351 gimplify_and_add (x, &llist[1]);
6353 if (rvar2)
6355 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6356 if (x)
6357 gimplify_and_add (x, &llist[1]);
6359 break;
6361 if (x)
6362 gimplify_and_add (x, &llist[0]);
6363 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6365 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6366 lower_omp (&tseq, ctx);
6367 gimple_seq_add_seq (&llist[0], tseq);
6369 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6370 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6371 lower_omp (&tseq, ctx);
6372 gimple_seq_add_seq (&llist[1], tseq);
6373 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6374 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6375 if (new_vard == new_var)
6376 SET_DECL_VALUE_EXPR (new_var, lvar);
6377 else
6378 SET_DECL_VALUE_EXPR (new_vard,
6379 build_fold_addr_expr (lvar));
6380 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6381 if (x)
6382 gimplify_and_add (x, &llist[1]);
6383 break;
6385 /* If this is a reference to a constant-size reduction var
6386 with a placeholder, we haven't emitted the initializer
6387 for it yet because that is undesirable if SIMD arrays are used.
6388 But if they aren't used, we need to emit the deferred
6389 initialization now. */
6390 else if (omp_is_reference (var) && is_simd)
6391 handle_simd_reference (clause_loc, new_vard, ilist);
6393 tree lab2 = NULL_TREE;
6394 if (cond)
6396 gimple *g;
6397 if (!is_parallel_ctx (ctx))
6399 tree condv = create_tmp_var (boolean_type_node);
6400 tree m = build_simple_mem_ref (cond);
6401 g = gimple_build_assign (condv, m);
6402 gimple_seq_add_stmt (ilist, g);
6403 tree lab1
6404 = create_artificial_label (UNKNOWN_LOCATION);
6405 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6406 g = gimple_build_cond (NE_EXPR, condv,
6407 boolean_false_node,
6408 lab2, lab1);
6409 gimple_seq_add_stmt (ilist, g);
6410 gimple_seq_add_stmt (ilist,
6411 gimple_build_label (lab1));
6413 g = gimple_build_assign (build_simple_mem_ref (cond),
6414 boolean_true_node);
6415 gimple_seq_add_stmt (ilist, g);
6417 x = lang_hooks.decls.omp_clause_default_ctor
6418 (c, unshare_expr (new_var),
6419 cond ? NULL_TREE
6420 : build_outer_var_ref (var, ctx));
6421 if (x)
6422 gimplify_and_add (x, ilist);
6424 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6425 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6427 if (ctx->for_simd_scan_phase)
6428 goto do_dtor;
6429 if (x || (!is_simd
6430 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6432 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6433 gimple_add_tmp_var (nv);
6434 ctx->cb.decl_map->put (new_vard, nv);
6435 x = lang_hooks.decls.omp_clause_default_ctor
6436 (c, nv, build_outer_var_ref (var, ctx));
6437 if (x)
6438 gimplify_and_add (x, ilist);
6439 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6441 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6442 tree vexpr = nv;
6443 if (new_vard != new_var)
6444 vexpr = build_fold_addr_expr (nv);
6445 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6446 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6447 lower_omp (&tseq, ctx);
6448 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6449 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6450 gimple_seq_add_seq (ilist, tseq);
6452 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6453 if (is_simd && ctx->scan_exclusive)
6455 tree nv2
6456 = create_tmp_var_raw (TREE_TYPE (new_var));
6457 gimple_add_tmp_var (nv2);
6458 ctx->cb.decl_map->put (nv, nv2);
6459 x = lang_hooks.decls.omp_clause_default_ctor
6460 (c, nv2, build_outer_var_ref (var, ctx));
6461 gimplify_and_add (x, ilist);
6462 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6463 if (x)
6464 gimplify_and_add (x, dlist);
6466 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6467 if (x)
6468 gimplify_and_add (x, dlist);
6470 else if (is_simd
6471 && ctx->scan_exclusive
6472 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6474 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6475 gimple_add_tmp_var (nv2);
6476 ctx->cb.decl_map->put (new_vard, nv2);
6477 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6478 if (x)
6479 gimplify_and_add (x, dlist);
6481 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6482 goto do_dtor;
6485 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6487 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6488 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6489 && is_omp_target (ctx->stmt))
6491 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6492 tree oldv = NULL_TREE;
6493 gcc_assert (d);
6494 if (DECL_HAS_VALUE_EXPR_P (d))
6495 oldv = DECL_VALUE_EXPR (d);
6496 SET_DECL_VALUE_EXPR (d, new_vard);
6497 DECL_HAS_VALUE_EXPR_P (d) = 1;
6498 lower_omp (&tseq, ctx);
6499 if (oldv)
6500 SET_DECL_VALUE_EXPR (d, oldv);
6501 else
6503 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6504 DECL_HAS_VALUE_EXPR_P (d) = 0;
6507 else
6508 lower_omp (&tseq, ctx);
6509 gimple_seq_add_seq (ilist, tseq);
6511 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6512 if (is_simd)
6514 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6515 lower_omp (&tseq, ctx);
6516 gimple_seq_add_seq (dlist, tseq);
6517 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6519 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6520 if (cond)
6522 if (lab2)
6523 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6524 break;
6526 goto do_dtor;
6528 else
6530 x = omp_reduction_init (c, TREE_TYPE (new_var));
6531 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6532 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6534 if (cond)
6536 gimple *g;
6537 tree lab2 = NULL_TREE;
6538 /* GOMP_taskgroup_reduction_register memsets the whole
6539 array to zero. If the initializer is zero, we don't
6540 need to initialize it again, just mark it as ever
6541 used unconditionally, i.e. cond = true. */
6542 if (initializer_zerop (x))
6544 g = gimple_build_assign (build_simple_mem_ref (cond),
6545 boolean_true_node);
6546 gimple_seq_add_stmt (ilist, g);
6547 break;
6550 /* Otherwise, emit
6551 if (!cond) { cond = true; new_var = x; } */
6552 if (!is_parallel_ctx (ctx))
6554 tree condv = create_tmp_var (boolean_type_node);
6555 tree m = build_simple_mem_ref (cond);
6556 g = gimple_build_assign (condv, m);
6557 gimple_seq_add_stmt (ilist, g);
6558 tree lab1
6559 = create_artificial_label (UNKNOWN_LOCATION);
6560 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6561 g = gimple_build_cond (NE_EXPR, condv,
6562 boolean_false_node,
6563 lab2, lab1);
6564 gimple_seq_add_stmt (ilist, g);
6565 gimple_seq_add_stmt (ilist,
6566 gimple_build_label (lab1));
6568 g = gimple_build_assign (build_simple_mem_ref (cond),
6569 boolean_true_node);
6570 gimple_seq_add_stmt (ilist, g);
6571 gimplify_assign (new_var, x, ilist);
6572 if (lab2)
6573 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6574 break;
6577 /* reduction(-:var) sums up the partial results, so it
6578 acts identically to reduction(+:var). */
6579 if (code == MINUS_EXPR)
6580 code = PLUS_EXPR;
6582 bool is_truth_op
6583 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6584 tree new_vard = new_var;
6585 if (is_simd && omp_is_reference (var))
6587 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6588 new_vard = TREE_OPERAND (new_var, 0);
6589 gcc_assert (DECL_P (new_vard));
6591 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6592 if (is_simd
6593 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6594 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6595 rvarp = &rvar;
6596 if (is_simd
6597 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6598 ivar, lvar, rvarp,
6599 &rvar2))
6601 if (new_vard != new_var)
6603 SET_DECL_VALUE_EXPR (new_vard,
6604 build_fold_addr_expr (lvar));
6605 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6608 tree ref = build_outer_var_ref (var, ctx);
6610 if (rvarp)
6612 if (ctx->for_simd_scan_phase)
6613 break;
6614 gimplify_assign (ivar, ref, &llist[0]);
6615 ref = build_outer_var_ref (var, ctx);
6616 gimplify_assign (ref, rvar, &llist[3]);
6617 break;
6620 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6622 if (sctx.is_simt)
6624 if (!simt_lane)
6625 simt_lane = create_tmp_var (unsigned_type_node);
6626 x = build_call_expr_internal_loc
6627 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6628 TREE_TYPE (ivar), 2, ivar, simt_lane);
6629 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6630 gimplify_assign (ivar, x, &llist[2]);
6632 tree ivar2 = ivar;
6633 tree ref2 = ref;
6634 if (is_truth_op)
6636 tree zero = build_zero_cst (TREE_TYPE (ivar));
6637 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6638 boolean_type_node, ivar,
6639 zero);
6640 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6641 boolean_type_node, ref,
6642 zero);
6644 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6645 if (is_truth_op)
6646 x = fold_convert (TREE_TYPE (ref), x);
6647 ref = build_outer_var_ref (var, ctx);
6648 gimplify_assign (ref, x, &llist[1]);
6651 else
6653 lower_private_allocate (var, new_var, allocator,
6654 allocate_ptr, ilist, ctx,
6655 false, NULL_TREE);
6656 if (omp_is_reference (var) && is_simd)
6657 handle_simd_reference (clause_loc, new_vard, ilist);
6658 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6659 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6660 break;
6661 gimplify_assign (new_var, x, ilist);
6662 if (is_simd)
6664 tree ref = build_outer_var_ref (var, ctx);
6665 tree new_var2 = new_var;
6666 tree ref2 = ref;
6667 if (is_truth_op)
6669 tree zero = build_zero_cst (TREE_TYPE (new_var));
6670 new_var2
6671 = fold_build2_loc (clause_loc, NE_EXPR,
6672 boolean_type_node, new_var,
6673 zero);
6674 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6675 boolean_type_node, ref,
6676 zero);
6678 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6679 if (is_truth_op)
6680 x = fold_convert (TREE_TYPE (new_var), x);
6681 ref = build_outer_var_ref (var, ctx);
6682 gimplify_assign (ref, x, dlist);
6684 if (allocator)
6685 goto do_dtor;
6688 break;
6690 default:
6691 gcc_unreachable ();
6695 if (tskred_avar)
6697 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6698 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6701 if (known_eq (sctx.max_vf, 1U))
6703 sctx.is_simt = false;
6704 if (ctx->lastprivate_conditional_map)
6706 if (gimple_omp_for_combined_into_p (ctx->stmt))
6708 /* Signal to lower_omp_1 that it should use parent context. */
6709 ctx->combined_into_simd_safelen1 = true;
6710 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6711 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6712 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6714 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6715 omp_context *outer = ctx->outer;
6716 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6717 outer = outer->outer;
6718 tree *v = ctx->lastprivate_conditional_map->get (o);
6719 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6720 tree *pv = outer->lastprivate_conditional_map->get (po);
6721 *v = *pv;
6724 else
6726 /* When not vectorized, treat lastprivate(conditional:) like
6727 normal lastprivate, as there will be just one simd lane
6728 writing the privatized variable. */
6729 delete ctx->lastprivate_conditional_map;
6730 ctx->lastprivate_conditional_map = NULL;
6735 if (nonconst_simd_if)
6737 if (sctx.lane == NULL_TREE)
6739 sctx.idx = create_tmp_var (unsigned_type_node);
6740 sctx.lane = create_tmp_var (unsigned_type_node);
6742 /* FIXME: For now. */
6743 sctx.is_simt = false;
6746 if (sctx.lane || sctx.is_simt)
6748 uid = create_tmp_var (ptr_type_node, "simduid");
6749 /* We don't want uninit warnings on simduid; it is always uninitialized,
6750 but we use it only for its DECL_UID, never for its value. */
6751 suppress_warning (uid, OPT_Wuninitialized);
6752 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6753 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6754 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6755 gimple_omp_for_set_clauses (ctx->stmt, c);
6757 /* Emit calls denoting privatized variables and initializing a pointer to
6758 the structure that holds private variables as fields after the ompdevlow pass. */
6759 if (sctx.is_simt)
6761 sctx.simt_eargs[0] = uid;
6762 gimple *g
6763 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6764 gimple_call_set_lhs (g, uid);
6765 gimple_seq_add_stmt (ilist, g);
6766 sctx.simt_eargs.release ();
6768 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6769 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6770 gimple_call_set_lhs (g, simtrec);
6771 gimple_seq_add_stmt (ilist, g);
6773 if (sctx.lane)
6775 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6776 2 + (nonconst_simd_if != NULL),
6777 uid, integer_zero_node,
6778 nonconst_simd_if);
6779 gimple_call_set_lhs (g, sctx.lane);
6780 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6781 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6782 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6783 build_int_cst (unsigned_type_node, 0));
6784 gimple_seq_add_stmt (ilist, g);
6785 if (sctx.lastlane)
6787 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6788 2, uid, sctx.lane);
6789 gimple_call_set_lhs (g, sctx.lastlane);
6790 gimple_seq_add_stmt (dlist, g);
6791 gimple_seq_add_seq (dlist, llist[3]);
6793 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
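/* Editorial sketch of the loop built below (invented names):
       simt_lane = 1;
       do
	 {
	   tmp = .GOMP_SIMT_XCHG_BFLY (ivar, simt_lane);  // llist[2]
	   ivar = ivar OP tmp;
	   simt_lane <<= 1;
	 }
       while (simt_lane < simt_vf);
   i.e. a butterfly reduction completing in log2(simt_vf) steps.  */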
6794 if (llist[2])
6796 tree simt_vf = create_tmp_var (unsigned_type_node);
6797 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6798 gimple_call_set_lhs (g, simt_vf);
6799 gimple_seq_add_stmt (dlist, g);
6801 tree t = build_int_cst (unsigned_type_node, 1);
6802 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6803 gimple_seq_add_stmt (dlist, g);
6805 t = build_int_cst (unsigned_type_node, 0);
6806 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6807 gimple_seq_add_stmt (dlist, g);
6809 tree body = create_artificial_label (UNKNOWN_LOCATION);
6810 tree header = create_artificial_label (UNKNOWN_LOCATION);
6811 tree end = create_artificial_label (UNKNOWN_LOCATION);
6812 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6813 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6815 gimple_seq_add_seq (dlist, llist[2]);
6817 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6818 gimple_seq_add_stmt (dlist, g);
6820 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6821 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6822 gimple_seq_add_stmt (dlist, g);
6824 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6826 for (int i = 0; i < 2; i++)
6827 if (llist[i])
6829 tree vf = create_tmp_var (unsigned_type_node);
6830 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6831 gimple_call_set_lhs (g, vf);
6832 gimple_seq *seq = i == 0 ? ilist : dlist;
6833 gimple_seq_add_stmt (seq, g);
6834 tree t = build_int_cst (unsigned_type_node, 0);
6835 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6836 gimple_seq_add_stmt (seq, g);
6837 tree body = create_artificial_label (UNKNOWN_LOCATION);
6838 tree header = create_artificial_label (UNKNOWN_LOCATION);
6839 tree end = create_artificial_label (UNKNOWN_LOCATION);
6840 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6841 gimple_seq_add_stmt (seq, gimple_build_label (body));
6842 gimple_seq_add_seq (seq, llist[i]);
6843 t = build_int_cst (unsigned_type_node, 1);
6844 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6845 gimple_seq_add_stmt (seq, g);
6846 gimple_seq_add_stmt (seq, gimple_build_label (header));
6847 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6848 gimple_seq_add_stmt (seq, g);
6849 gimple_seq_add_stmt (seq, gimple_build_label (end));
6852 if (sctx.is_simt)
6854 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6855 gimple *g
6856 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6857 gimple_seq_add_stmt (dlist, g);
6860 /* The copyin sequence is not to be executed by the main thread, since
6861 that would result in self-copies. That may not be visible for scalars,
6862 but it certainly is for C++ operator=. */
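/* Editorial sketch: the guard emitted below is roughly
       if (omp_get_thread_num () != 0)
	 copyin_seq;
   so only non-main threads perform the copyin assignments.  */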
6863 if (copyin_seq)
6865 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6867 x = build2 (NE_EXPR, boolean_type_node, x,
6868 build_int_cst (TREE_TYPE (x), 0));
6869 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6870 gimplify_and_add (x, ilist);
6873 /* If any copyin variable is passed by reference, we must ensure the
6874 master thread doesn't modify it before it is copied over in all
6875 threads. Similarly for variables in both firstprivate and
6876 lastprivate clauses we need to ensure the lastprivate copying
6877 happens after firstprivate copying in all threads. And similarly
6878 for UDRs if the initializer expression refers to omp_orig. */
6879 if (copyin_by_ref || lastprivate_firstprivate
6880 || (reduction_omp_orig_ref
6881 && !ctx->scan_inclusive
6882 && !ctx->scan_exclusive))
6884 /* Don't add any barrier for #pragma omp simd or
6885 #pragma omp distribute. */
6886 if (!is_task_ctx (ctx)
6887 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6888 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6889 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6892 /* If max_vf is non-zero, then we can use only a vectorization factor
6893 up to the max_vf we chose. So stick it into the safelen clause. */
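/* Editorial example: if the loop already has safelen(16) but the
   privatization above settled on max_vf == 4 (e.g. due to a fixed
   SIMD array size), a new safelen(4) clause is prepended so later
   passes honor the tighter bound; likewise when no safelen clause
   exists at all.  */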
6894 if (maybe_ne (sctx.max_vf, 0U))
6896 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6897 OMP_CLAUSE_SAFELEN);
6898 poly_uint64 safe_len;
6899 if (c == NULL_TREE
6900 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6901 && maybe_gt (safe_len, sctx.max_vf)))
6903 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6904 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6905 sctx.max_vf);
6906 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6907 gimple_omp_for_set_clauses (ctx->stmt, c);
6912 /* Create temporary variables for lastprivate(conditional:) implementation
6913 in context CTX with CLAUSES. */
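/* Editorial example (assumed source): given
       #pragma omp for lastprivate(conditional: x)
   this creates an iteration-counter temporary via an artificial
   _condtemp_ clause (plus, for the non-simd case, a pointer-typed
   _condtemp_ serving as the condition buffer), and records the
   privatized X -> counter mapping in lastprivate_conditional_map.  */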
6915 static void
6916 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6918 tree iter_type = NULL_TREE;
6919 tree cond_ptr = NULL_TREE;
6920 tree iter_var = NULL_TREE;
6921 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6922 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6923 tree next = *clauses;
6924 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6925 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6926 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6928 if (is_simd)
6930 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6931 gcc_assert (cc);
6932 if (iter_type == NULL_TREE)
6934 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6935 iter_var = create_tmp_var_raw (iter_type);
6936 DECL_CONTEXT (iter_var) = current_function_decl;
6937 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6938 DECL_CHAIN (iter_var) = ctx->block_vars;
6939 ctx->block_vars = iter_var;
6940 tree c3
6941 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6942 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6943 OMP_CLAUSE_DECL (c3) = iter_var;
6944 OMP_CLAUSE_CHAIN (c3) = *clauses;
6945 *clauses = c3;
6946 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6948 next = OMP_CLAUSE_CHAIN (cc);
6949 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6950 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6951 ctx->lastprivate_conditional_map->put (o, v);
6952 continue;
6954 if (iter_type == NULL)
6956 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6958 struct omp_for_data fd;
6959 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6960 NULL);
6961 iter_type = unsigned_type_for (fd.iter_type);
6963 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6964 iter_type = unsigned_type_node;
6965 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6966 if (c2)
6968 cond_ptr
6969 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6970 OMP_CLAUSE_DECL (c2) = cond_ptr;
6972 else
6974 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6975 DECL_CONTEXT (cond_ptr) = current_function_decl;
6976 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6977 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6978 ctx->block_vars = cond_ptr;
6979 c2 = build_omp_clause (UNKNOWN_LOCATION,
6980 OMP_CLAUSE__CONDTEMP_);
6981 OMP_CLAUSE_DECL (c2) = cond_ptr;
6982 OMP_CLAUSE_CHAIN (c2) = *clauses;
6983 *clauses = c2;
6985 iter_var = create_tmp_var_raw (iter_type);
6986 DECL_CONTEXT (iter_var) = current_function_decl;
6987 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6988 DECL_CHAIN (iter_var) = ctx->block_vars;
6989 ctx->block_vars = iter_var;
6990 tree c3
6991 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6992 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6993 OMP_CLAUSE_DECL (c3) = iter_var;
6994 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6995 OMP_CLAUSE_CHAIN (c2) = c3;
6996 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6998 tree v = create_tmp_var_raw (iter_type);
6999 DECL_CONTEXT (v) = current_function_decl;
7000 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
7001 DECL_CHAIN (v) = ctx->block_vars;
7002 ctx->block_vars = v;
7003 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7004 ctx->lastprivate_conditional_map->put (o, v);
7009 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7010 both parallel and workshare constructs. PREDICATE may be NULL if it's
7011 always true. BODY_P is the sequence in which to insert early initialization
7012 if needed, STMT_LIST is where the non-conditional lastprivate handling
7013 goes, and CSTMT_LIST is a sequence that needs to be run in a critical
7014 section. */
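/* Editorial sketch of the common output shape (invented names):
       if (PREDICATE)   // e.g. "this thread ran the last iteration"
	 {
	   orig_x = priv_x;   // one omp_clause_assign_op per clause
	 }
   with lastprivate(conditional:) compares and copies routed into
   CSTMT_LIST so they execute inside a critical section.  */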
7016 static void
7017 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
7018 gimple_seq *stmt_list, gimple_seq *cstmt_list,
7019 omp_context *ctx)
7021 tree x, c, label = NULL, orig_clauses = clauses;
7022 bool par_clauses = false;
7023 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
7024 unsigned HOST_WIDE_INT conditional_off = 0;
7025 gimple_seq post_stmt_list = NULL;
7027 /* Early exit if there are no lastprivate or linear clauses. */
7028 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
7029 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
7030 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
7031 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
7032 break;
7033 if (clauses == NULL)
7035 /* If this was a workshare clause, see if it had been combined
7036 with its parallel. In that case, look for the clauses on the
7037 parallel statement itself. */
7038 if (is_parallel_ctx (ctx))
7039 return;
7041 ctx = ctx->outer;
7042 if (ctx == NULL || !is_parallel_ctx (ctx))
7043 return;
7045 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7046 OMP_CLAUSE_LASTPRIVATE);
7047 if (clauses == NULL)
7048 return;
7049 par_clauses = true;
7052 bool maybe_simt = false;
7053 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7054 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7056 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
7057 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
7058 if (simduid)
7059 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
7062 if (predicate)
7064 gcond *stmt;
7065 tree label_true, arm1, arm2;
7066 enum tree_code pred_code = TREE_CODE (predicate);
7068 label = create_artificial_label (UNKNOWN_LOCATION);
7069 label_true = create_artificial_label (UNKNOWN_LOCATION);
7070 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
7072 arm1 = TREE_OPERAND (predicate, 0);
7073 arm2 = TREE_OPERAND (predicate, 1);
7074 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7075 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
7077 else
7079 arm1 = predicate;
7080 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7081 arm2 = boolean_false_node;
7082 pred_code = NE_EXPR;
7084 if (maybe_simt)
7086 c = build2 (pred_code, boolean_type_node, arm1, arm2);
7087 c = fold_convert (integer_type_node, c);
7088 simtcond = create_tmp_var (integer_type_node);
7089 gimplify_assign (simtcond, c, stmt_list);
7090 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
7091 1, simtcond);
7092 c = create_tmp_var (integer_type_node);
7093 gimple_call_set_lhs (g, c);
7094 gimple_seq_add_stmt (stmt_list, g);
7095 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
7096 label_true, label);
7098 else
7099 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
7100 gimple_seq_add_stmt (stmt_list, stmt);
7101 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
7104 tree cond_ptr = NULL_TREE;
7105 for (c = clauses; c ;)
7107 tree var, new_var;
7108 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7109 gimple_seq *this_stmt_list = stmt_list;
7110 tree lab2 = NULL_TREE;
7112 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7113 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7114 && ctx->lastprivate_conditional_map
7115 && !ctx->combined_into_simd_safelen1)
7117 gcc_assert (body_p);
7118 if (simduid)
7119 goto next;
7120 if (cond_ptr == NULL_TREE)
7122 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
7123 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
7125 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
7126 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7127 tree v = *ctx->lastprivate_conditional_map->get (o);
7128 gimplify_assign (v, build_zero_cst (type), body_p);
7129 this_stmt_list = cstmt_list;
7130 tree mem;
7131 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
7133 mem = build2 (MEM_REF, type, cond_ptr,
7134 build_int_cst (TREE_TYPE (cond_ptr),
7135 conditional_off));
7136 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
7138 else
7139 mem = build4 (ARRAY_REF, type, cond_ptr,
7140 size_int (conditional_off++), NULL_TREE, NULL_TREE);
7141 tree mem2 = copy_node (mem);
7142 gimple_seq seq = NULL;
7143 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
7144 gimple_seq_add_seq (this_stmt_list, seq);
7145 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7146 lab2 = create_artificial_label (UNKNOWN_LOCATION);
7147 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
7148 gimple_seq_add_stmt (this_stmt_list, g);
7149 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
7150 gimplify_assign (mem2, v, this_stmt_list);
7152 else if (predicate
7153 && ctx->combined_into_simd_safelen1
7154 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7155 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7156 && ctx->lastprivate_conditional_map)
7157 this_stmt_list = &post_stmt_list;
7159 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7160 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7161 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7163 var = OMP_CLAUSE_DECL (c);
7164 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7165 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7166 && is_taskloop_ctx (ctx))
7168 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
7169 new_var = lookup_decl (var, ctx->outer);
7171 else
7173 new_var = lookup_decl (var, ctx);
7174 /* Avoid uninitialized warnings for lastprivate and
7175 for linear iterators. */
7176 if (predicate
7177 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7178 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
7179 suppress_warning (new_var, OPT_Wuninitialized);
7182 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
7184 tree val = DECL_VALUE_EXPR (new_var);
7185 if (TREE_CODE (val) == ARRAY_REF
7186 && VAR_P (TREE_OPERAND (val, 0))
7187 && lookup_attribute ("omp simd array",
7188 DECL_ATTRIBUTES (TREE_OPERAND (val,
7189 0))))
7191 if (lastlane == NULL)
7193 lastlane = create_tmp_var (unsigned_type_node);
7194 gcall *g
7195 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
7196 2, simduid,
7197 TREE_OPERAND (val, 1));
7198 gimple_call_set_lhs (g, lastlane);
7199 gimple_seq_add_stmt (this_stmt_list, g);
7201 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
7202 TREE_OPERAND (val, 0), lastlane,
7203 NULL_TREE, NULL_TREE);
7204 TREE_THIS_NOTRAP (new_var) = 1;
7207 else if (maybe_simt)
7209 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7210 ? DECL_VALUE_EXPR (new_var)
7211 : new_var);
7212 if (simtlast == NULL)
7214 simtlast = create_tmp_var (unsigned_type_node);
7215 gcall *g = gimple_build_call_internal
7216 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7217 gimple_call_set_lhs (g, simtlast);
7218 gimple_seq_add_stmt (this_stmt_list, g);
7220 x = build_call_expr_internal_loc
7221 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7222 TREE_TYPE (val), 2, val, simtlast);
7223 new_var = unshare_expr (new_var);
7224 gimplify_assign (new_var, x, this_stmt_list);
7225 new_var = unshare_expr (new_var);
7228 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7229 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7231 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7232 gimple_seq_add_seq (this_stmt_list,
7233 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7234 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7236 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7237 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7239 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7240 gimple_seq_add_seq (this_stmt_list,
7241 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7242 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
7245 x = NULL_TREE;
7246 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7247 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7248 && is_taskloop_ctx (ctx))
7250 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7251 ctx->outer->outer);
7252 if (is_global_var (ovar))
7253 x = ovar;
7255 if (!x)
7256 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7257 if (omp_is_reference (var))
7258 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7259 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7260 gimplify_and_add (x, this_stmt_list);
7262 if (lab2)
7263 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7266 next:
7267 c = OMP_CLAUSE_CHAIN (c);
7268 if (c == NULL && !par_clauses)
7270 /* If this was a workshare clause, see if it had been combined
7271 with its parallel. In that case, continue looking for the
7272 clauses also on the parallel statement itself. */
7273 if (is_parallel_ctx (ctx))
7274 break;
7276 ctx = ctx->outer;
7277 if (ctx == NULL || !is_parallel_ctx (ctx))
7278 break;
7280 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7281 OMP_CLAUSE_LASTPRIVATE);
7282 par_clauses = true;
7286 if (label)
7287 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7288 gimple_seq_add_seq (stmt_list, post_stmt_list);
7291 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7292 (which might be a placeholder). INNER is true if this is an inner
7293 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7294 join markers. Generate the before-loop forking sequence in
7295 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
7296 general form of these sequences is
7298 GOACC_REDUCTION_SETUP
7299 GOACC_FORK
7300 GOACC_REDUCTION_INIT
7302 GOACC_REDUCTION_FINI
7303 GOACC_JOIN
7304 GOACC_REDUCTION_TEARDOWN. */
7306 static void
7307 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7308 gcall *fork, gcall *private_marker, gcall *join,
7309 gimple_seq *fork_seq, gimple_seq *join_seq,
7310 omp_context *ctx)
7312 gimple_seq before_fork = NULL;
7313 gimple_seq after_fork = NULL;
7314 gimple_seq before_join = NULL;
7315 gimple_seq after_join = NULL;
7316 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7317 setup_code = NULL_TREE, teardown_code = NULL_TREE;
7318 unsigned offset = 0;
7320 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7321 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7323 /* No 'reduction' clauses on OpenACC 'kernels'. */
7324 gcc_checking_assert (!is_oacc_kernels (ctx));
7325 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7326 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7328 tree orig = OMP_CLAUSE_DECL (c);
7329 tree var = maybe_lookup_decl (orig, ctx);
7330 tree ref_to_res = NULL_TREE;
7331 tree incoming, outgoing, v1, v2, v3;
7332 bool is_private = false;
7334 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7335 if (rcode == MINUS_EXPR)
7336 rcode = PLUS_EXPR;
7337 else if (rcode == TRUTH_ANDIF_EXPR)
7338 rcode = BIT_AND_EXPR;
7339 else if (rcode == TRUTH_ORIF_EXPR)
7340 rcode = BIT_IOR_EXPR;
7341 tree op = build_int_cst (unsigned_type_node, rcode);
7343 if (!var)
7344 var = orig;
7346 incoming = outgoing = var;
7348 if (!inner)
7350 /* See if an outer construct also reduces this variable. */
7351 omp_context *outer = ctx;
7353 while (omp_context *probe = outer->outer)
7355 enum gimple_code type = gimple_code (probe->stmt);
7356 tree cls;
7358 switch (type)
7360 case GIMPLE_OMP_FOR:
7361 cls = gimple_omp_for_clauses (probe->stmt);
7362 break;
7364 case GIMPLE_OMP_TARGET:
7365 /* No 'reduction' clauses inside OpenACC 'kernels'
7366 regions. */
7367 gcc_checking_assert (!is_oacc_kernels (probe));
7369 if (!is_gimple_omp_offloaded (probe->stmt))
7370 goto do_lookup;
7372 cls = gimple_omp_target_clauses (probe->stmt);
7373 break;
7375 default:
7376 goto do_lookup;
7379 outer = probe;
7380 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7381 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7382 && orig == OMP_CLAUSE_DECL (cls))
7384 incoming = outgoing = lookup_decl (orig, probe);
7385 goto has_outer_reduction;
7387 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7388 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7389 && orig == OMP_CLAUSE_DECL (cls))
7391 is_private = true;
7392 goto do_lookup;
7396 do_lookup:
7397 /* This is the outermost construct with this reduction,
7398 see if there's a mapping for it. */
7399 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7400 && maybe_lookup_field (orig, outer) && !is_private)
7402 ref_to_res = build_receiver_ref (orig, false, outer);
7403 if (omp_is_reference (orig))
7404 ref_to_res = build_simple_mem_ref (ref_to_res);
7406 tree type = TREE_TYPE (var);
7407 if (POINTER_TYPE_P (type))
7408 type = TREE_TYPE (type);
7410 outgoing = var;
7411 incoming = omp_reduction_init_op (loc, rcode, type);
7413 else
7415 /* Try to look at enclosing contexts for the reduction var;
7416 use the original if no mapping is found. */
7417 tree t = NULL_TREE;
7418 omp_context *c = ctx->outer;
7419 while (c && !t)
7421 t = maybe_lookup_decl (orig, c);
7422 c = c->outer;
7424 incoming = outgoing = (t ? t : orig);
7427 has_outer_reduction:;
7430 if (!ref_to_res)
7431 ref_to_res = integer_zero_node;
7433 if (omp_is_reference (orig))
7435 tree type = TREE_TYPE (var);
7436 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7438 if (!inner)
7440 tree x = create_tmp_var (TREE_TYPE (type), id);
7441 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7444 v1 = create_tmp_var (type, id);
7445 v2 = create_tmp_var (type, id);
7446 v3 = create_tmp_var (type, id);
7448 gimplify_assign (v1, var, fork_seq);
7449 gimplify_assign (v2, var, fork_seq);
7450 gimplify_assign (v3, var, fork_seq);
7452 var = build_simple_mem_ref (var);
7453 v1 = build_simple_mem_ref (v1);
7454 v2 = build_simple_mem_ref (v2);
7455 v3 = build_simple_mem_ref (v3);
7456 outgoing = build_simple_mem_ref (outgoing);
7458 if (!TREE_CONSTANT (incoming))
7459 incoming = build_simple_mem_ref (incoming);
7461 else
7462 v1 = v2 = v3 = var;
7464 /* Determine the position in the reduction buffer, which may be
7465 used by the target. The parser has ensured that this is not a
7466 variable-sized type. */
7467 fixed_size_mode mode
7468 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7469 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7470 offset = (offset + align - 1) & ~(align - 1);
7471 tree off = build_int_cst (sizetype, offset);
7472 offset += GET_MODE_SIZE (mode);
7474 if (!init_code)
7476 init_code = build_int_cst (integer_type_node,
7477 IFN_GOACC_REDUCTION_INIT);
7478 fini_code = build_int_cst (integer_type_node,
7479 IFN_GOACC_REDUCTION_FINI);
7480 setup_code = build_int_cst (integer_type_node,
7481 IFN_GOACC_REDUCTION_SETUP);
7482 teardown_code = build_int_cst (integer_type_node,
7483 IFN_GOACC_REDUCTION_TEARDOWN);
7486 tree setup_call
7487 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7488 TREE_TYPE (var), 6, setup_code,
7489 unshare_expr (ref_to_res),
7490 incoming, level, op, off);
7491 tree init_call
7492 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7493 TREE_TYPE (var), 6, init_code,
7494 unshare_expr (ref_to_res),
7495 v1, level, op, off);
7496 tree fini_call
7497 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7498 TREE_TYPE (var), 6, fini_code,
7499 unshare_expr (ref_to_res),
7500 v2, level, op, off);
7501 tree teardown_call
7502 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7503 TREE_TYPE (var), 6, teardown_code,
7504 ref_to_res, v3, level, op, off);
7506 gimplify_assign (v1, setup_call, &before_fork);
7507 gimplify_assign (v2, init_call, &after_fork);
7508 gimplify_assign (v3, fini_call, &before_join);
7509 gimplify_assign (outgoing, teardown_call, &after_join);
7512 /* Now stitch things together. */
7513 gimple_seq_add_seq (fork_seq, before_fork);
7514 if (private_marker)
7515 gimple_seq_add_stmt (fork_seq, private_marker);
7516 if (fork)
7517 gimple_seq_add_stmt (fork_seq, fork);
7518 gimple_seq_add_seq (fork_seq, after_fork);
7520 gimple_seq_add_seq (join_seq, before_join);
7521 if (join)
7522 gimple_seq_add_stmt (join_seq, join);
7523 gimple_seq_add_seq (join_seq, after_join);
7526 /* Generate code to implement the REDUCTION clauses, append it
7527 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7528 that should be emitted also inside of the critical section,
7529 in that case clear *CLIST afterwards, otherwise leave it as is
7530 and let the caller emit it itself. */
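/* A sketch of the two strategies (temporaries elided): with a single
   scalar reduction clause such as

     #pragma omp parallel for reduction (+:s)

   each thread's private S is merged into the shared S with a relaxed
   atomic update, roughly

     #pragma omp atomic
     shared_s += private_s;

   whereas with several clauses, array reductions or user-defined
   reductions the merge statements are instead emitted between
   GOMP_atomic_start () and GOMP_atomic_end () calls.  */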
7532 static void
7533 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7534 gimple_seq *clist, omp_context *ctx)
7536 gimple_seq sub_seq = NULL;
7537 gimple *stmt;
7538 tree x, c;
7539 int count = 0;
7541 /* OpenACC loop reductions are handled elsewhere. */
7542 if (is_gimple_omp_oacc (ctx->stmt))
7543 return;
7545 /* SIMD reductions are handled in lower_rec_input_clauses. */
7546 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7547 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7548 return;
7550 /* inscan reductions are handled elsewhere. */
7551 if (ctx->scan_inclusive || ctx->scan_exclusive)
7552 return;
7554 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7555 update in that case, otherwise use a lock. */
7556 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7557 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7558 && !OMP_CLAUSE_REDUCTION_TASK (c))
7560 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7561 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7563 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7564 count = -1;
7565 break;
7567 count++;
7570 if (count == 0)
7571 return;
7573 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7575 tree var, ref, new_var, orig_var;
7576 enum tree_code code;
7577 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7579 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7580 || OMP_CLAUSE_REDUCTION_TASK (c))
7581 continue;
7583 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7584 orig_var = var = OMP_CLAUSE_DECL (c);
7585 if (TREE_CODE (var) == MEM_REF)
7587 var = TREE_OPERAND (var, 0);
7588 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7589 var = TREE_OPERAND (var, 0);
7590 if (TREE_CODE (var) == ADDR_EXPR)
7591 var = TREE_OPERAND (var, 0);
7592 else
7594 /* If this is a pointer- or reference-based array
7595 section, the var could be private in the outer
7596 context, e.g. on an orphaned loop construct. Pretend
7597 this is a private variable's outer reference. */
7598 ccode = OMP_CLAUSE_PRIVATE;
7599 if (TREE_CODE (var) == INDIRECT_REF)
7600 var = TREE_OPERAND (var, 0);
7602 orig_var = var;
7603 if (is_variable_sized (var))
7605 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7606 var = DECL_VALUE_EXPR (var);
7607 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7608 var = TREE_OPERAND (var, 0);
7609 gcc_assert (DECL_P (var));
7612 new_var = lookup_decl (var, ctx);
7613 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
7614 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7615 ref = build_outer_var_ref (var, ctx, ccode);
7616 code = OMP_CLAUSE_REDUCTION_CODE (c);
7618 /* reduction(-:var) sums up the partial results, so it acts
7619 identically to reduction(+:var). */
7620 if (code == MINUS_EXPR)
7621 code = PLUS_EXPR;
7623 bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
7624 if (count == 1)
7626 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7628 addr = save_expr (addr);
7629 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7630 tree new_var2 = new_var;
7631 tree ref2 = ref;
7632 if (is_truth_op)
7634 tree zero = build_zero_cst (TREE_TYPE (new_var));
7635 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7636 boolean_type_node, new_var, zero);
7637 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7638 ref, zero);
7640 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7641 new_var2);
7642 if (is_truth_op)
7643 x = fold_convert (TREE_TYPE (new_var), x);
7644 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7645 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7646 gimplify_and_add (x, stmt_seqp);
7647 return;
7649 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7651 tree d = OMP_CLAUSE_DECL (c);
7652 tree type = TREE_TYPE (d);
7653 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7654 tree i = create_tmp_var (TREE_TYPE (v));
7655 tree ptype = build_pointer_type (TREE_TYPE (type));
7656 tree bias = TREE_OPERAND (d, 1);
7657 d = TREE_OPERAND (d, 0);
7658 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7660 tree b = TREE_OPERAND (d, 1);
7661 b = maybe_lookup_decl (b, ctx);
7662 if (b == NULL)
7664 b = TREE_OPERAND (d, 1);
7665 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7667 if (integer_zerop (bias))
7668 bias = b;
7669 else
7671 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7672 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7673 TREE_TYPE (b), b, bias);
7675 d = TREE_OPERAND (d, 0);
7677 /* For ref, build_outer_var_ref already performs this, so
7678 only new_var needs a dereference. */
7679 if (TREE_CODE (d) == INDIRECT_REF)
7681 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7682 gcc_assert (omp_is_reference (var) && var == orig_var);
7684 else if (TREE_CODE (d) == ADDR_EXPR)
7686 if (orig_var == var)
7688 new_var = build_fold_addr_expr (new_var);
7689 ref = build_fold_addr_expr (ref);
7692 else
7694 gcc_assert (orig_var == var);
7695 if (omp_is_reference (var))
7696 ref = build_fold_addr_expr (ref);
7698 if (DECL_P (v))
7700 tree t = maybe_lookup_decl (v, ctx);
7701 if (t)
7702 v = t;
7703 else
7704 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7705 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7707 if (!integer_zerop (bias))
7709 bias = fold_convert_loc (clause_loc, sizetype, bias);
7710 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7711 TREE_TYPE (new_var), new_var,
7712 unshare_expr (bias));
7713 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7714 TREE_TYPE (ref), ref, bias);
7716 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7717 ref = fold_convert_loc (clause_loc, ptype, ref);
7718 tree m = create_tmp_var (ptype);
7719 gimplify_assign (m, new_var, stmt_seqp);
7720 new_var = m;
7721 m = create_tmp_var (ptype);
7722 gimplify_assign (m, ref, stmt_seqp);
7723 ref = m;
7724 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7725 tree body = create_artificial_label (UNKNOWN_LOCATION);
7726 tree end = create_artificial_label (UNKNOWN_LOCATION);
7727 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7728 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7729 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7730 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7732 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7733 tree decl_placeholder
7734 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7735 SET_DECL_VALUE_EXPR (placeholder, out);
7736 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7737 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7738 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7739 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7740 gimple_seq_add_seq (&sub_seq,
7741 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7742 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7743 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7744 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7746 else
7748 tree out2 = out;
7749 tree priv2 = priv;
7750 if (is_truth_op)
7752 tree zero = build_zero_cst (TREE_TYPE (out));
7753 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7754 boolean_type_node, out, zero);
7755 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7756 boolean_type_node, priv, zero);
7758 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7759 if (is_truth_op)
7760 x = fold_convert (TREE_TYPE (out), x);
7761 out = unshare_expr (out);
7762 gimplify_assign (out, x, &sub_seq);
7764 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7765 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7766 gimple_seq_add_stmt (&sub_seq, g);
7767 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7768 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7769 gimple_seq_add_stmt (&sub_seq, g);
7770 g = gimple_build_assign (i, PLUS_EXPR, i,
7771 build_int_cst (TREE_TYPE (i), 1));
7772 gimple_seq_add_stmt (&sub_seq, g);
7773 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7774 gimple_seq_add_stmt (&sub_seq, g);
7775 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7777 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7779 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7781 if (omp_is_reference (var)
7782 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7783 TREE_TYPE (ref)))
7784 ref = build_fold_addr_expr_loc (clause_loc, ref);
7785 SET_DECL_VALUE_EXPR (placeholder, ref);
7786 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7787 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7788 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7789 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7790 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7792 else
7794 tree new_var2 = new_var;
7795 tree ref2 = ref;
7796 if (is_truth_op)
7798 tree zero = build_zero_cst (TREE_TYPE (new_var));
7799 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7800 boolean_type_node, new_var, zero);
7801 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7802 ref, zero);
7804 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7805 if (is_truth_op)
7806 x = fold_convert (TREE_TYPE (new_var), x);
7807 ref = build_outer_var_ref (var, ctx);
7808 gimplify_assign (ref, x, &sub_seq);
7812 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START), 0);
7814 gimple_seq_add_stmt (stmt_seqp, stmt);
7816 gimple_seq_add_seq (stmt_seqp, sub_seq);
7818 if (clist)
7820 gimple_seq_add_seq (stmt_seqp, *clist);
7821 *clist = NULL;
7824 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END), 0);
7826 gimple_seq_add_stmt (stmt_seqp, stmt);
7830 /* Generate code to implement the COPYPRIVATE clauses. */
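/* Sketch, assuming record fields named after the variables: for

     #pragma omp single copyprivate (a)

   the thread that executed the single region stores A (or its address,
   when passed by reference) into the sender record in SLIST,

     .omp_copy_o.a = a;

   and every other thread copies it back out of the broadcast pointer
   in RLIST,

     a = .omp_copy_i->a;  */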
7832 static void
7833 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7834 omp_context *ctx)
7836 tree c;
7838 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7840 tree var, new_var, ref, x;
7841 bool by_ref;
7842 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7844 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7845 continue;
7847 var = OMP_CLAUSE_DECL (c);
7848 by_ref = use_pointer_for_field (var, NULL);
7850 ref = build_sender_ref (var, ctx);
7851 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7852 if (by_ref)
7854 x = build_fold_addr_expr_loc (clause_loc, new_var);
7855 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7857 gimplify_assign (ref, x, slist);
7859 ref = build_receiver_ref (var, false, ctx);
7860 if (by_ref)
7862 ref = fold_convert_loc (clause_loc,
7863 build_pointer_type (TREE_TYPE (new_var)),
7864 ref);
7865 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7867 if (omp_is_reference (var))
7869 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7870 ref = build_simple_mem_ref_loc (clause_loc, ref);
7871 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7873 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7874 gimplify_and_add (x, rlist);
7879 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7880 and REDUCTION from the sender (aka parent) side. */
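/* For instance (a sketch; field names mirror the variables), for
   '#pragma omp task firstprivate (x)' the parent side emits into ILIST

     .omp_data_o.x = x;

   before the region starts, while clauses that also copy a value back,
   such as lastprivate, additionally emit into OLIST

     x = .omp_data_o.x;

   to run once the region completes.  */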
7882 static void
7883 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7884 omp_context *ctx)
7886 tree c, t;
7887 int ignored_looptemp = 0;
7888 bool is_taskloop = false;
7890 /* For taskloop, ignore the first two _looptemp_ clauses; those are
7891 initialized by GOMP_taskloop. */
7892 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7894 ignored_looptemp = 2;
7895 is_taskloop = true;
7898 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7900 tree val, ref, x, var;
7901 bool by_ref, do_in = false, do_out = false;
7902 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7904 switch (OMP_CLAUSE_CODE (c))
7906 case OMP_CLAUSE_PRIVATE:
7907 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7908 break;
7909 continue;
7910 case OMP_CLAUSE_FIRSTPRIVATE:
7911 case OMP_CLAUSE_COPYIN:
7912 case OMP_CLAUSE_LASTPRIVATE:
7913 case OMP_CLAUSE_IN_REDUCTION:
7914 case OMP_CLAUSE__REDUCTEMP_:
7915 break;
7916 case OMP_CLAUSE_REDUCTION:
7917 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7918 continue;
7919 break;
7920 case OMP_CLAUSE_SHARED:
7921 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7922 break;
7923 continue;
7924 case OMP_CLAUSE__LOOPTEMP_:
7925 if (ignored_looptemp)
7927 ignored_looptemp--;
7928 continue;
7930 break;
7931 default:
7932 continue;
7935 val = OMP_CLAUSE_DECL (c);
7936 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7937 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7938 && TREE_CODE (val) == MEM_REF)
7940 val = TREE_OPERAND (val, 0);
7941 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7942 val = TREE_OPERAND (val, 0);
7943 if (TREE_CODE (val) == INDIRECT_REF
7944 || TREE_CODE (val) == ADDR_EXPR)
7945 val = TREE_OPERAND (val, 0);
7946 if (is_variable_sized (val))
7947 continue;
7950 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7951 outer taskloop region. */
7952 omp_context *ctx_for_o = ctx;
7953 if (is_taskloop
7954 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7955 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7956 ctx_for_o = ctx->outer;
7958 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
7960 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7961 && is_global_var (var)
7962 && (val == OMP_CLAUSE_DECL (c)
7963 || !is_task_ctx (ctx)
7964 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
7965 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
7966 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
7967 != POINTER_TYPE)))))
7968 continue;
7970 t = omp_member_access_dummy_var (var);
7971 if (t)
7973 var = DECL_VALUE_EXPR (var);
7974 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
7975 if (o != t)
7976 var = unshare_and_remap (var, t, o);
7977 else
7978 var = unshare_expr (var);
7981 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
7983 /* Handle taskloop firstprivate/lastprivate, where the
7984 lastprivate on GIMPLE_OMP_TASK is represented as
7985 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7986 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7987 x = omp_build_component_ref (ctx->sender_decl, f);
7988 if (use_pointer_for_field (val, ctx))
7989 var = build_fold_addr_expr (var);
7990 gimplify_assign (x, var, ilist);
7991 DECL_ABSTRACT_ORIGIN (f) = NULL;
7992 continue;
7995 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7996 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
7997 || val == OMP_CLAUSE_DECL (c))
7998 && is_variable_sized (val))
7999 continue;
8000 by_ref = use_pointer_for_field (val, NULL);
8002 switch (OMP_CLAUSE_CODE (c))
8004 case OMP_CLAUSE_FIRSTPRIVATE:
8005 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
8006 && !by_ref
8007 && is_task_ctx (ctx))
8008 suppress_warning (var);
8009 do_in = true;
8010 break;
8012 case OMP_CLAUSE_PRIVATE:
8013 case OMP_CLAUSE_COPYIN:
8014 case OMP_CLAUSE__LOOPTEMP_:
8015 case OMP_CLAUSE__REDUCTEMP_:
8016 do_in = true;
8017 break;
8019 case OMP_CLAUSE_LASTPRIVATE:
8020 if (by_ref || omp_is_reference (val))
8022 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8023 continue;
8024 do_in = true;
8026 else
8028 do_out = true;
8029 if (lang_hooks.decls.omp_private_outer_ref (val))
8030 do_in = true;
8032 break;
8034 case OMP_CLAUSE_REDUCTION:
8035 case OMP_CLAUSE_IN_REDUCTION:
8036 do_in = true;
8037 if (val == OMP_CLAUSE_DECL (c))
8039 if (is_task_ctx (ctx))
8040 by_ref = use_pointer_for_field (val, ctx);
8041 else
8042 do_out = !(by_ref || omp_is_reference (val));
8044 else
8045 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
8046 break;
8048 default:
8049 gcc_unreachable ();
8052 if (do_in)
8054 ref = build_sender_ref (val, ctx);
8055 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
8056 gimplify_assign (ref, x, ilist);
8057 if (is_task_ctx (ctx))
8058 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
8061 if (do_out)
8063 ref = build_sender_ref (val, ctx);
8064 gimplify_assign (var, ref, olist);
8069 /* Generate code to implement SHARED from the sender (aka parent)
8070 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8071 list things that got automatically shared. */
8073 static void
8074 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
8076 tree var, ovar, nvar, t, f, x, record_type;
8078 if (ctx->record_type == NULL)
8079 return;
8081 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
8082 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8084 ovar = DECL_ABSTRACT_ORIGIN (f);
8085 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
8086 continue;
8088 nvar = maybe_lookup_decl (ovar, ctx);
8089 if (!nvar
8090 || !DECL_HAS_VALUE_EXPR_P (nvar)
8091 || (ctx->allocate_map
8092 && ctx->allocate_map->get (ovar)))
8093 continue;
8095 /* If CTX is a nested parallel directive, find the immediately
8096 enclosing parallel or workshare construct that contains a
8097 mapping for OVAR. */
8098 var = lookup_decl_in_outer_ctx (ovar, ctx);
8100 t = omp_member_access_dummy_var (var);
8101 if (t)
8103 var = DECL_VALUE_EXPR (var);
8104 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
8105 if (o != t)
8106 var = unshare_and_remap (var, t, o);
8107 else
8108 var = unshare_expr (var);
8111 if (use_pointer_for_field (ovar, ctx))
8113 x = build_sender_ref (ovar, ctx);
8114 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
8115 && TREE_TYPE (f) == TREE_TYPE (ovar))
8117 gcc_assert (is_parallel_ctx (ctx)
8118 && DECL_ARTIFICIAL (ovar));
8119 /* _condtemp_ clause. */
8120 var = build_constructor (TREE_TYPE (x), NULL);
8122 else
8123 var = build_fold_addr_expr (var);
8124 gimplify_assign (x, var, ilist);
8126 else
8128 x = build_sender_ref (ovar, ctx);
8129 gimplify_assign (x, var, ilist);
8131 if (!TREE_READONLY (var)
8132 /* We don't need to receive a new reference to a result
8133 or parm decl. In fact we must not store to it, as we would
8134 invalidate any pending RSO and generate wrong gimple
8135 during inlining. */
8136 && !((TREE_CODE (var) == RESULT_DECL
8137 || TREE_CODE (var) == PARM_DECL)
8138 && DECL_BY_REFERENCE (var)))
8140 x = build_sender_ref (ovar, ctx);
8141 gimplify_assign (var, x, olist);
8147 /* Emit an OpenACC head marker call, encapsulating the partitioning and
8148 other information that must be processed by the target compiler.
8149 Return the maximum number of dimensions the associated loop might
8150 be partitioned over. */
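/* The emitted marker looks roughly like this (cf. omplower tree dumps;
   the optional STATIC argument is only present for 'gang (static:...)'):

     .data_dep = UNIQUE (OACC_HEAD_MARK, .data_dep, LEVELS, TAG [, STATIC]);

   where TAG packs the OLF_* partitioning flags and LEVELS is the number
   of partitioning levels computed below.  */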
8152 static unsigned
8153 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
8154 gimple_seq *seq, omp_context *ctx)
8156 unsigned levels = 0;
8157 unsigned tag = 0;
8158 tree gang_static = NULL_TREE;
8159 auto_vec<tree, 5> args;
8161 args.quick_push (build_int_cst
8162 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
8163 args.quick_push (ddvar);
8164 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8166 switch (OMP_CLAUSE_CODE (c))
8168 case OMP_CLAUSE_GANG:
8169 tag |= OLF_DIM_GANG;
8170 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
8171 /* static:* is represented by -1, and we can ignore it, as
8172 scheduling is always static. */
8173 if (gang_static && integer_minus_onep (gang_static))
8174 gang_static = NULL_TREE;
8175 levels++;
8176 break;
8178 case OMP_CLAUSE_WORKER:
8179 tag |= OLF_DIM_WORKER;
8180 levels++;
8181 break;
8183 case OMP_CLAUSE_VECTOR:
8184 tag |= OLF_DIM_VECTOR;
8185 levels++;
8186 break;
8188 case OMP_CLAUSE_SEQ:
8189 tag |= OLF_SEQ;
8190 break;
8192 case OMP_CLAUSE_AUTO:
8193 tag |= OLF_AUTO;
8194 break;
8196 case OMP_CLAUSE_INDEPENDENT:
8197 tag |= OLF_INDEPENDENT;
8198 break;
8200 case OMP_CLAUSE_TILE:
8201 tag |= OLF_TILE;
8202 break;
8204 default:
8205 continue;
8209 if (gang_static)
8211 if (DECL_P (gang_static))
8212 gang_static = build_outer_var_ref (gang_static, ctx);
8213 tag |= OLF_GANG_STATIC;
8216 omp_context *tgt = enclosing_target_ctx (ctx);
8217 if (!tgt || is_oacc_parallel_or_serial (tgt))
8218 ;
8219 else if (is_oacc_kernels (tgt))
8220 /* This loop handling is not used inside OpenACC 'kernels' regions. */
8221 gcc_unreachable ();
8222 else if (is_oacc_kernels_decomposed_part (tgt))
8223 ;
8224 else
8225 gcc_unreachable ();
8227 /* In a parallel region, loops are implicitly INDEPENDENT. */
8228 if (!tgt || is_oacc_parallel_or_serial (tgt))
8229 tag |= OLF_INDEPENDENT;
8231 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8232 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8233 if (tgt && is_oacc_kernels_decomposed_part (tgt))
8235 gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
8236 gcc_assert (!(tag & OLF_AUTO));
8239 if (tag & OLF_TILE)
8240 /* Tiling could use all 3 levels. */
8241 levels = 3;
8242 else
8244 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8245 Ensure at least one level, or 2 for possible auto
8246 partitioning. */
8247 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
8248 << OLF_DIM_BASE) | OLF_SEQ));
8250 if (levels < 1u + maybe_auto)
8251 levels = 1u + maybe_auto;
8254 args.quick_push (build_int_cst (integer_type_node, levels));
8255 args.quick_push (build_int_cst (integer_type_node, tag));
8256 if (gang_static)
8257 args.quick_push (gang_static);
8259 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
8260 gimple_set_location (call, loc);
8261 gimple_set_lhs (call, ddvar);
8262 gimple_seq_add_stmt (seq, call);
8264 return levels;
8267 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW, if
8268 non-NULL, gives the partitioning level of the enclosed region. */
8270 static void
8271 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8272 tree tofollow, gimple_seq *seq)
8274 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8275 : IFN_UNIQUE_OACC_TAIL_MARK);
8276 tree marker = build_int_cst (integer_type_node, marker_kind);
8277 int nargs = 2 + (tofollow != NULL_TREE);
8278 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8279 marker, ddvar, tofollow);
8280 gimple_set_location (call, loc);
8281 gimple_set_lhs (call, ddvar);
8282 gimple_seq_add_stmt (seq, call);
8285 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8286 the loop clauses, from which we extract reductions. Initialize
8287 HEAD and TAIL. */
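/* Conceptually, for a loop partitioned over two levels, HEAD and TAIL
   end up bracketing the loop like

     HEAD:  reduction setup/init for level 1
            UNIQUE (OACC_FORK)
            reduction setup/init for level 2
            UNIQUE (OACC_FORK)
     TAIL:  reduction fini/teardown for level 2
            UNIQUE (OACC_JOIN)
            reduction fini/teardown for level 1
            UNIQUE (OACC_JOIN)

   (a sketch; the loop markers delimiting each level are interspersed).  */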
8289 static void
8290 lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
8291 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
8293 bool inner = false;
8294 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
8295 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
8297 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
8299 if (private_marker)
8301 gimple_set_location (private_marker, loc);
8302 gimple_call_set_lhs (private_marker, ddvar);
8303 gimple_call_set_arg (private_marker, 1, ddvar);
8306 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
8307 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
8309 gcc_assert (count);
8310 for (unsigned done = 1; count; count--, done++)
8312 gimple_seq fork_seq = NULL;
8313 gimple_seq join_seq = NULL;
8315 tree place = build_int_cst (integer_type_node, -1);
8316 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
8317 fork_kind, ddvar, place);
8318 gimple_set_location (fork, loc);
8319 gimple_set_lhs (fork, ddvar);
8321 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
8322 join_kind, ddvar, place);
8323 gimple_set_location (join, loc);
8324 gimple_set_lhs (join, ddvar);
8326 /* Mark the beginning of this level sequence. */
8327 if (inner)
8328 lower_oacc_loop_marker (loc, ddvar, true,
8329 build_int_cst (integer_type_node, count),
8330 &fork_seq);
8331 lower_oacc_loop_marker (loc, ddvar, false,
8332 build_int_cst (integer_type_node, done),
8333 &join_seq);
8335 lower_oacc_reductions (loc, clauses, place, inner,
8336 fork, (count == 1) ? private_marker : NULL,
8337 join, &fork_seq, &join_seq, ctx);
8339 /* Append this level to head. */
8340 gimple_seq_add_seq (head, fork_seq);
8341 /* Prepend it to tail. */
8342 gimple_seq_add_seq (&join_seq, *tail);
8343 *tail = join_seq;
8345 inner = true;
8348 /* Mark the end of the sequence. */
8349 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
8350 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
8353 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8354 catch handler and return it. This prevents programs from violating the
8355 structured block semantics with throws. */
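/* I.e. the lowered body becomes, as a sketch,

     try
       {
         BODY
       }
     catch (MUST_NOT_THROW)
       {
         call the language's cleanup action, when the frontend provides
         one (e.g. terminate for C++), or __builtin_trap otherwise
       }
   */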
8357 static gimple_seq
8358 maybe_catch_exception (gimple_seq body)
8360 gimple *g;
8361 tree decl;
8363 if (!flag_exceptions)
8364 return body;
8366 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8367 decl = lang_hooks.eh_protect_cleanup_actions ();
8368 else
8369 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8371 g = gimple_build_eh_must_not_throw (decl);
8372 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8373 GIMPLE_TRY_CATCH);
8375 return gimple_seq_alloc_with_stmt (g);
8379 /* Routines to lower OMP directives into OMP-GIMPLE. */
8381 /* If ctx is a worksharing context inside a cancellable parallel
8382 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8383 and a conditional branch to the parallel's cancel_label to handle
8384 cancellation in the implicit barrier. */
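/* The emitted check is, as a sketch,

     <GIMPLE_OMP_RETURN with lhs receiving the cancellation status>
     if (lhs != false) goto <parallel's cancel_label>; else goto fallthru;
     fallthru:
   */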
8386 static void
8387 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8388 gimple_seq *body)
8390 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8391 if (gimple_omp_return_nowait_p (omp_return))
8392 return;
8393 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8394 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8395 && outer->cancellable)
8397 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8398 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8399 tree lhs = create_tmp_var (c_bool_type);
8400 gimple_omp_return_set_lhs (omp_return, lhs);
8401 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8402 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8403 fold_convert (c_bool_type,
8404 boolean_false_node),
8405 outer->cancel_label, fallthru_label);
8406 gimple_seq_add_stmt (body, g);
8407 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8409 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
8410 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
8411 return;
8414 /* Find the first task_reduction or reduction clause, or return NULL
8415 if there are none. */
8417 static inline tree
8418 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8419 enum omp_clause_code ccode)
8421 while (1)
8423 clauses = omp_find_clause (clauses, ccode);
8424 if (clauses == NULL_TREE)
8425 return NULL_TREE;
8426 if (ccode != OMP_CLAUSE_REDUCTION
8427 || code == OMP_TASKLOOP
8428 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8429 return clauses;
8430 clauses = OMP_CLAUSE_CHAIN (clauses);
8434 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8435 gimple_seq *, gimple_seq *);
8437 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8438 CTX is the enclosing OMP context for the current statement. */
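/* The lowered result has approximately this shape (a sketch; details
   such as reduction and lastprivate handling vary with the clauses):

     <ilist: privatization and firstprivate setup>
     GIMPLE_OMP_SECTIONS <.section control var>
     GIMPLE_OMP_SECTIONS_SWITCH
     bind { the lowered GIMPLE_OMP_SECTION bodies }
     GIMPLE_OMP_CONTINUE
     <olist: reduction merges>
     <dlist: destructors>
     GIMPLE_OMP_RETURN [nowait]
   */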
8440 static void
8441 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8443 tree block, control;
8444 gimple_stmt_iterator tgsi;
8445 gomp_sections *stmt;
8446 gimple *t;
8447 gbind *new_stmt, *bind;
8448 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8450 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
8452 push_gimplify_context ();
8454 dlist = NULL;
8455 ilist = NULL;
8457 tree rclauses
8458 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
8459 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
8460 tree rtmp = NULL_TREE;
8461 if (rclauses)
8463 tree type = build_pointer_type (pointer_sized_int_node);
8464 tree temp = create_tmp_var (type);
8465 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8466 OMP_CLAUSE_DECL (c) = temp;
8467 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
8468 gimple_omp_sections_set_clauses (stmt, c);
8469 lower_omp_task_reductions (ctx, OMP_SECTIONS,
8470 gimple_omp_sections_clauses (stmt),
8471 &ilist, &tred_dlist);
8472 rclauses = c;
8473 rtmp = make_ssa_name (type);
8474 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
8477 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
8478 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
8480 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
8481 &ilist, &dlist, ctx, NULL);
8483 control = create_tmp_var (unsigned_type_node, ".section");
8484 gimple_omp_sections_set_control (stmt, control);
8486 new_body = gimple_omp_body (stmt);
8487 gimple_omp_set_body (stmt, NULL);
8488 tgsi = gsi_start (new_body);
8489 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
8491 omp_context *sctx;
8492 gimple *sec_start;
8494 sec_start = gsi_stmt (tgsi);
8495 sctx = maybe_lookup_ctx (sec_start);
8496 gcc_assert (sctx);
8498 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8499 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8500 GSI_CONTINUE_LINKING);
8501 gimple_omp_set_body (sec_start, NULL);
8503 if (gsi_one_before_end_p (tgsi))
8505 gimple_seq l = NULL;
8506 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8507 &ilist, &l, &clist, ctx);
8508 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8509 gimple_omp_section_set_last (sec_start);
8512 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8513 GSI_CONTINUE_LINKING);
8516 block = make_node (BLOCK);
8517 bind = gimple_build_bind (NULL, new_body, block);
8519 olist = NULL;
8520 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8521 &clist, ctx);
8522 if (clist)
8524 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8525 gcall *g = gimple_build_call (fndecl, 0);
8526 gimple_seq_add_stmt (&olist, g);
8527 gimple_seq_add_seq (&olist, clist);
8528 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8529 g = gimple_build_call (fndecl, 0);
8530 gimple_seq_add_stmt (&olist, g);
8533 block = make_node (BLOCK);
8534 new_stmt = gimple_build_bind (NULL, NULL, block);
8535 gsi_replace (gsi_p, new_stmt, true);
8537 pop_gimplify_context (new_stmt);
8538 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8539 BLOCK_VARS (block) = gimple_bind_vars (bind);
8540 if (BLOCK_VARS (block))
8541 TREE_USED (block) = 1;
8543 new_body = NULL;
8544 gimple_seq_add_seq (&new_body, ilist);
8545 gimple_seq_add_stmt (&new_body, stmt);
8546 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8547 gimple_seq_add_stmt (&new_body, bind);
8549 t = gimple_build_omp_continue (control, control);
8550 gimple_seq_add_stmt (&new_body, t);
8552 gimple_seq_add_seq (&new_body, olist);
8553 if (ctx->cancellable)
8554 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8555 gimple_seq_add_seq (&new_body, dlist);
8557 new_body = maybe_catch_exception (new_body);
8559 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8560 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8561 t = gimple_build_omp_return (nowait);
8562 gimple_seq_add_stmt (&new_body, t);
8563 gimple_seq_add_seq (&new_body, tred_dlist);
8564 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
8566 if (rclauses)
8567 OMP_CLAUSE_DECL (rclauses) = rtmp;
8569 gimple_bind_set_body (new_stmt, new_body);
8573 /* A subroutine of lower_omp_single. Expand the simple form of
8574 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8576 if (GOMP_single_start ())
8577 BODY;
8578 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8580 FIXME. It may be better to delay expanding the logic of this until
8581 pass_expand_omp. The expanded logic may make the job more difficult
8582 for a synchronization analysis pass. */
8584 static void
8585 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8587 location_t loc = gimple_location (single_stmt);
8588 tree tlabel = create_artificial_label (loc);
8589 tree flabel = create_artificial_label (loc);
8590 gimple *call, *cond;
8591 tree lhs, decl;
8593 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8594 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8595 call = gimple_build_call (decl, 0);
8596 gimple_call_set_lhs (call, lhs);
8597 gimple_seq_add_stmt (pre_p, call);
8599 cond = gimple_build_cond (EQ_EXPR, lhs,
8600 fold_convert_loc (loc, TREE_TYPE (lhs),
8601 boolean_true_node),
8602 tlabel, flabel);
8603 gimple_seq_add_stmt (pre_p, cond);
8604 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8605 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8606 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8610 /* A subroutine of lower_omp_single. Expand the simple form of
8611 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8613 #pragma omp single copyprivate (a, b, c)
8615 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8618 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8620 BODY;
8621 copyout.a = a;
8622 copyout.b = b;
8623 copyout.c = c;
8624 GOMP_single_copy_end (&copyout);
8626 else
8628 a = copyout_p->a;
8629 b = copyout_p->b;
8630 c = copyout_p->c;
8632 GOMP_barrier ();
8635 FIXME. It may be better to delay expanding the logic of this until
8636 pass_expand_omp. The expanded logic may make the job more difficult
8637 for a synchronization analysis pass. */
8639 static void
8640 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8641 omp_context *ctx)
8643 tree ptr_type, t, l0, l1, l2, bfn_decl;
8644 gimple_seq copyin_seq;
8645 location_t loc = gimple_location (single_stmt);
8647 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8649 ptr_type = build_pointer_type (ctx->record_type);
8650 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8652 l0 = create_artificial_label (loc);
8653 l1 = create_artificial_label (loc);
8654 l2 = create_artificial_label (loc);
8656 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8657 t = build_call_expr_loc (loc, bfn_decl, 0);
8658 t = fold_convert_loc (loc, ptr_type, t);
8659 gimplify_assign (ctx->receiver_decl, t, pre_p);
8661 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8662 build_int_cst (ptr_type, 0));
8663 t = build3 (COND_EXPR, void_type_node, t,
8664 build_and_jump (&l0), build_and_jump (&l1));
8665 gimplify_and_add (t, pre_p);
8667 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8669 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8671 copyin_seq = NULL;
8672 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8673 &copyin_seq, ctx);
8675 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8676 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8677 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8678 gimplify_and_add (t, pre_p);
8680 t = build_and_jump (&l2);
8681 gimplify_and_add (t, pre_p);
8683 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8685 gimple_seq_add_seq (pre_p, copyin_seq);
8687 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8691 /* Expand code for an OpenMP single directive. */
8693 static void
8694 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8696 tree block;
8697 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8698 gbind *bind;
8699 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8701 push_gimplify_context ();
8703 block = make_node (BLOCK);
8704 bind = gimple_build_bind (NULL, NULL, block);
8705 gsi_replace (gsi_p, bind, true);
8706 bind_body = NULL;
8707 dlist = NULL;
8708 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8709 &bind_body, &dlist, ctx, NULL);
8710 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8712 gimple_seq_add_stmt (&bind_body, single_stmt);
8714 if (ctx->record_type)
8715 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8716 else
8717 lower_omp_single_simple (single_stmt, &bind_body);
8719 gimple_omp_set_body (single_stmt, NULL);
8721 gimple_seq_add_seq (&bind_body, dlist);
8723 bind_body = maybe_catch_exception (bind_body);
8725 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8726 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8727 gimple *g = gimple_build_omp_return (nowait);
8728 gimple_seq_add_stmt (&bind_body_tail, g);
8729 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8730 if (ctx->record_type)
8732 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8733 tree clobber = build_clobber (ctx->record_type);
8734 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8735 clobber), GSI_SAME_STMT);
8737 gimple_seq_add_seq (&bind_body, bind_body_tail);
8738 gimple_bind_set_body (bind, bind_body);
8740 pop_gimplify_context (bind);
8742 gimple_bind_append_vars (bind, ctx->block_vars);
8743 BLOCK_VARS (block) = ctx->block_vars;
8744 if (BLOCK_VARS (block))
8745 TREE_USED (block) = 1;
8749 /* Lower code for an OMP scope directive. */
8751 static void
8752 lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8754 tree block;
8755 gimple *scope_stmt = gsi_stmt (*gsi_p);
8756 gbind *bind;
8757 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8758 gimple_seq tred_dlist = NULL;
8760 push_gimplify_context ();
8762 block = make_node (BLOCK);
8763 bind = gimple_build_bind (NULL, NULL, block);
8764 gsi_replace (gsi_p, bind, true);
8765 bind_body = NULL;
8766 dlist = NULL;
8768 tree rclauses
8769 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
8770 OMP_SCOPE, OMP_CLAUSE_REDUCTION);
8771 if (rclauses)
8773 tree type = build_pointer_type (pointer_sized_int_node);
8774 tree temp = create_tmp_var (type);
8775 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8776 OMP_CLAUSE_DECL (c) = temp;
8777 OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
8778 gimple_omp_scope_set_clauses (scope_stmt, c);
8779 lower_omp_task_reductions (ctx, OMP_SCOPE,
8780 gimple_omp_scope_clauses (scope_stmt),
8781 &bind_body, &tred_dlist);
8782 rclauses = c;
8783 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
8784 gimple *stmt = gimple_build_call (fndecl, 1, temp);
8785 gimple_seq_add_stmt (&bind_body, stmt);
8788 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
8789 &bind_body, &dlist, ctx, NULL);
8790 lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);
8792 gimple_seq_add_stmt (&bind_body, scope_stmt);
8794 gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));
8796 gimple_omp_set_body (scope_stmt, NULL);
8798 gimple_seq clist = NULL;
8799 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
8800 &bind_body, &clist, ctx);
8801 if (clist)
8803 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8804 gcall *g = gimple_build_call (fndecl, 0);
8805 gimple_seq_add_stmt (&bind_body, g);
8806 gimple_seq_add_seq (&bind_body, clist);
8807 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8808 g = gimple_build_call (fndecl, 0);
8809 gimple_seq_add_stmt (&bind_body, g);
8812 gimple_seq_add_seq (&bind_body, dlist);
8814 bind_body = maybe_catch_exception (bind_body);
8816 bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
8817 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8818 gimple *g = gimple_build_omp_return (nowait);
8819 gimple_seq_add_stmt (&bind_body_tail, g);
8820 gimple_seq_add_seq (&bind_body_tail, tred_dlist);
8821 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8822 if (ctx->record_type)
8824 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8825 tree clobber = build_clobber (ctx->record_type);
8826 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8827 clobber), GSI_SAME_STMT);
8829 gimple_seq_add_seq (&bind_body, bind_body_tail);
8831 gimple_bind_set_body (bind, bind_body);
8833 pop_gimplify_context (bind);
8835 gimple_bind_append_vars (bind, ctx->block_vars);
8836 BLOCK_VARS (block) = ctx->block_vars;
8837 if (BLOCK_VARS (block))
8838 TREE_USED (block) = 1;
8840 /* Expand code for an OpenMP master or masked directive. */
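/* I.e. as a sketch, with FILTER being 0 for 'master' and the filter
   expression for 'masked':

     if (omp_get_thread_num () == FILTER)
       BODY;
     lab:
     GIMPLE_OMP_RETURN
   */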
8842 static void
8843 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8845 tree block, lab = NULL, x, bfn_decl;
8846 gimple *stmt = gsi_stmt (*gsi_p);
8847 gbind *bind;
8848 location_t loc = gimple_location (stmt);
8849 gimple_seq tseq;
8850 tree filter = integer_zero_node;
8852 push_gimplify_context ();
8854 if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
8856 filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
8857 OMP_CLAUSE_FILTER);
8858 if (filter)
8859 filter = fold_convert (integer_type_node,
8860 OMP_CLAUSE_FILTER_EXPR (filter));
8861 else
8862 filter = integer_zero_node;
8864 block = make_node (BLOCK);
8865 bind = gimple_build_bind (NULL, NULL, block);
8866 gsi_replace (gsi_p, bind, true);
8867 gimple_bind_add_stmt (bind, stmt);
8869 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8870 x = build_call_expr_loc (loc, bfn_decl, 0);
8871 x = build2 (EQ_EXPR, boolean_type_node, x, filter);
8872 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8873 tseq = NULL;
8874 gimplify_and_add (x, &tseq);
8875 gimple_bind_add_seq (bind, tseq);
8877 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8878 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8879 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8880 gimple_omp_set_body (stmt, NULL);
8882 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8884 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8886 pop_gimplify_context (bind);
8888 gimple_bind_append_vars (bind, ctx->block_vars);
8889 BLOCK_VARS (block) = ctx->block_vars;
8892 /* Helper function for lower_omp_task_reductions. For a specific PASS
8893 find the next clause that should be processed, or return false
8894 if all have been processed already. */
8896 static inline bool
8897 omp_task_reduction_iterate (int pass, enum tree_code code,
8898 enum omp_clause_code ccode, tree *c, tree *decl,
8899 tree *type, tree *next)
8901 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8903 if (ccode == OMP_CLAUSE_REDUCTION
8904 && code != OMP_TASKLOOP
8905 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8906 continue;
8907 *decl = OMP_CLAUSE_DECL (*c);
8908 *type = TREE_TYPE (*decl);
8909 if (TREE_CODE (*decl) == MEM_REF)
8911 if (pass != 1)
8912 continue;
8914 else
8916 if (omp_is_reference (*decl))
8917 *type = TREE_TYPE (*type);
8918 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8919 continue;
8921 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8922 return true;
8924 *decl = NULL_TREE;
8925 *type = NULL_TREE;
8926 *next = NULL_TREE;
8927 return false;
8930 /* Lower task_reduction clauses, and reduction clauses with the task
8931 modifier (for OMP_TASKLOOP, all reduction clauses). Register mappings
8932 of those in the START sequence; reduce and unregister them in the END sequence. */
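/* A sketch of the registration array built in START (the authoritative
   format description is in libgomp/task.c; only the slots assigned in
   the code below are listed):

     [0]            number of reductions CNT
     [1]            per-thread chunk size, padded to a cache line
     [2]            required alignment
     [3], [4]       -1 and 0, filled in and used by the runtime
     [7 + 3*i]      address of the i-th original reduction variable
     [7 + 3*i + 1]  byte offset of its field within the chunk  */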
8934 static void
8935 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8936 gimple_seq *start, gimple_seq *end)
8938 enum omp_clause_code ccode
8939 = (code == OMP_TASKGROUP
8940 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8941 tree cancellable = NULL_TREE;
8942 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8943 if (clauses == NULL_TREE)
8944 return;
8945 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
8947 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8948 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8949 && outer->cancellable)
8951 cancellable = error_mark_node;
8952 break;
8954 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
8955 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
8956 break;
8958 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8959 tree *last = &TYPE_FIELDS (record_type);
8960 unsigned cnt = 0;
8961 if (cancellable)
8963 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8964 ptr_type_node);
8965 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8966 integer_type_node);
8967 *last = field;
8968 DECL_CHAIN (field) = ifield;
8969 last = &DECL_CHAIN (ifield);
8970 DECL_CONTEXT (field) = record_type;
8971 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8972 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8973 DECL_CONTEXT (ifield) = record_type;
8974 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8975 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8977 for (int pass = 0; pass < 2; pass++)
8979 tree decl, type, next;
8980 for (tree c = clauses;
8981 omp_task_reduction_iterate (pass, code, ccode,
8982 &c, &decl, &type, &next); c = next)
8984 ++cnt;
8985 tree new_type = type;
8986 if (ctx->outer)
8987 new_type = remap_type (type, &ctx->outer->cb);
8988 tree field
8989 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8990 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8991 new_type);
8992 if (DECL_P (decl) && type == TREE_TYPE (decl))
8994 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8995 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8996 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8998 else
8999 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
9000 DECL_CONTEXT (field) = record_type;
9001 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9002 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9003 *last = field;
9004 last = &DECL_CHAIN (field);
9005 tree bfield
9006 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
9007 boolean_type_node);
9008 DECL_CONTEXT (bfield) = record_type;
9009 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
9010 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
9011 *last = bfield;
9012 last = &DECL_CHAIN (bfield);
9015 *last = NULL_TREE;
9016 layout_type (record_type);
9018 /* Build up an array used to register all the reductions with the runtime
9019 and to deregister them at the end. Format documented in libgomp/task.c. */
9020 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
9021 tree avar = create_tmp_var_raw (atype);
9022 gimple_add_tmp_var (avar);
9023 TREE_ADDRESSABLE (avar) = 1;
9024 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
9025 NULL_TREE, NULL_TREE);
9026 tree t = build_int_cst (pointer_sized_int_node, cnt);
9027 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9028 gimple_seq seq = NULL;
9029 tree sz = fold_convert (pointer_sized_int_node,
9030 TYPE_SIZE_UNIT (record_type));
9031 int cachesz = 64;
9032 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
9033 build_int_cst (pointer_sized_int_node, cachesz - 1));
9034 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
9035 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
9036 ctx->task_reductions.create (1 + cnt);
9037 ctx->task_reduction_map = new hash_map<tree, unsigned>;
9038 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
9039 ? sz : NULL_TREE);
9040 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
9041 gimple_seq_add_seq (start, seq);
9042 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
9043 NULL_TREE, NULL_TREE);
9044 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
9045 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9046 NULL_TREE, NULL_TREE);
9047 t = build_int_cst (pointer_sized_int_node,
9048 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
9049 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9050 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
9051 NULL_TREE, NULL_TREE);
9052 t = build_int_cst (pointer_sized_int_node, -1);
9053 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9054 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
9055 NULL_TREE, NULL_TREE);
9056 t = build_int_cst (pointer_sized_int_node, 0);
9057 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9059 /* In END, build a loop that iterates from 0 to omp_get_num_threads () - 1
9060 and for each task reduction checks a bool right after the private variable
9061 within that thread's chunk; if the bool is clear, it hasn't been
9062 initialized and thus isn't going to be reduced or destructed, otherwise
9063 reduce and destruct it. */
9064 tree idx = create_tmp_var (size_type_node);
9065 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
9066 tree num_thr_sz = create_tmp_var (size_type_node);
9067 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9068 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9069 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
9070 gimple *g;
9071 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9073 /* For worksharing constructs or scope, only perform it in the master
9074 thread, except for cancelled implicit barriers, where only the
9075 current thread is handled. */
9076 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9077 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9078 tree thr_num = create_tmp_var (integer_type_node);
9079 g = gimple_build_call (t, 0);
9080 gimple_call_set_lhs (g, thr_num);
9081 gimple_seq_add_stmt (end, g);
9082 if (cancellable)
9084 tree c;
9085 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9086 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9087 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9088 if (code == OMP_FOR)
9089 c = gimple_omp_for_clauses (ctx->stmt);
9090 else if (code == OMP_SECTIONS)
9091 c = gimple_omp_sections_clauses (ctx->stmt);
9092 else /* if (code == OMP_SCOPE) */
9093 c = gimple_omp_scope_clauses (ctx->stmt);
9094 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
9095 cancellable = c;
9096 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
9097 lab5, lab6);
9098 gimple_seq_add_stmt (end, g);
9099 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9100 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
9101 gimple_seq_add_stmt (end, g);
9102 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
9103 build_one_cst (TREE_TYPE (idx)));
9104 gimple_seq_add_stmt (end, g);
9105 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
9106 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9108 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
9109 gimple_seq_add_stmt (end, g);
9110 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9112 if (code != OMP_PARALLEL)
9114 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9115 tree num_thr = create_tmp_var (integer_type_node);
9116 g = gimple_build_call (t, 0);
9117 gimple_call_set_lhs (g, num_thr);
9118 gimple_seq_add_stmt (end, g);
9119 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
9120 gimple_seq_add_stmt (end, g);
9121 if (cancellable)
9122 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9124 else
9126 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
9127 OMP_CLAUSE__REDUCTEMP_);
9128 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
9129 t = fold_convert (size_type_node, t);
9130 gimplify_assign (num_thr_sz, t, end);
9132 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9133 NULL_TREE, NULL_TREE);
9134 tree data = create_tmp_var (pointer_sized_int_node);
9135 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
9136 if (code == OMP_TASKLOOP)
9138 lab7 = create_artificial_label (UNKNOWN_LOCATION);
9139 g = gimple_build_cond (NE_EXPR, data,
9140 build_zero_cst (pointer_sized_int_node),
9141 lab1, lab7);
9142 gimple_seq_add_stmt (end, g);
9144 gimple_seq_add_stmt (end, gimple_build_label (lab1));
9145 tree ptr;
9146 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
9147 ptr = create_tmp_var (build_pointer_type (record_type));
9148 else
9149 ptr = create_tmp_var (ptr_type_node);
9150 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
9152 tree field = TYPE_FIELDS (record_type);
9153 cnt = 0;
9154 if (cancellable)
9155 field = DECL_CHAIN (DECL_CHAIN (field));
9156 for (int pass = 0; pass < 2; pass++)
9158 tree decl, type, next;
9159 for (tree c = clauses;
9160 omp_task_reduction_iterate (pass, code, ccode,
9161 &c, &decl, &type, &next); c = next)
9163 tree var = decl, ref;
9164 if (TREE_CODE (decl) == MEM_REF)
9166 var = TREE_OPERAND (var, 0);
9167 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
9168 var = TREE_OPERAND (var, 0);
9169 tree v = var;
9170 if (TREE_CODE (var) == ADDR_EXPR)
9171 var = TREE_OPERAND (var, 0);
9172 else if (TREE_CODE (var) == INDIRECT_REF)
9173 var = TREE_OPERAND (var, 0);
9174 tree orig_var = var;
9175 if (is_variable_sized (var))
9177 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
9178 var = DECL_VALUE_EXPR (var);
9179 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
9180 var = TREE_OPERAND (var, 0);
9181 gcc_assert (DECL_P (var));
9183 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9184 if (orig_var != var)
9185 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
9186 else if (TREE_CODE (v) == ADDR_EXPR)
9187 t = build_fold_addr_expr (t);
9188 else if (TREE_CODE (v) == INDIRECT_REF)
9189 t = build_fold_indirect_ref (t);
9190 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
9192 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
9193 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
9194 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
9196 if (!integer_zerop (TREE_OPERAND (decl, 1)))
9197 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
9198 fold_convert (size_type_node,
9199 TREE_OPERAND (decl, 1)));
9201 else
9203 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9204 if (!omp_is_reference (decl))
9205 t = build_fold_addr_expr (t);
9207 t = fold_convert (pointer_sized_int_node, t);
9208 seq = NULL;
9209 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9210 gimple_seq_add_seq (start, seq);
9211 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9212 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9213 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9214 t = unshare_expr (byte_position (field));
9215 t = fold_convert (pointer_sized_int_node, t);
9216 ctx->task_reduction_map->put (c, cnt);
9217 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
9218 ? t : NULL_TREE);
9219 seq = NULL;
9220 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9221 gimple_seq_add_seq (start, seq);
9222 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9223 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
9224 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9226 tree bfield = DECL_CHAIN (field);
9227 tree cond;
9228 if (code == OMP_PARALLEL
9229 || code == OMP_FOR
9230 || code == OMP_SECTIONS
9231 || code == OMP_SCOPE)
9232 /* In parallel, worksharing or scope constructs, all threads unconditionally
9233 initialize all their task reduction private variables. */
9234 cond = boolean_true_node;
9235 else if (TREE_TYPE (ptr) == ptr_type_node)
9237 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9238 unshare_expr (byte_position (bfield)));
9239 seq = NULL;
9240 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
9241 gimple_seq_add_seq (end, seq);
9242 tree pbool = build_pointer_type (TREE_TYPE (bfield));
9243 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
9244 build_int_cst (pbool, 0));
9246 else
9247 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
9248 build_simple_mem_ref (ptr), bfield, NULL_TREE);
9249 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9250 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9251 tree condv = create_tmp_var (boolean_type_node);
9252 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
9253 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
9254 lab3, lab4);
9255 gimple_seq_add_stmt (end, g);
9256 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9257 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
9259 /* If this reduction doesn't need destruction and parallel
9260 has been cancelled, there is nothing to do for this
9261 reduction, so jump around the merge operation. */
9262 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9263 g = gimple_build_cond (NE_EXPR, cancellable,
9264 build_zero_cst (TREE_TYPE (cancellable)),
9265 lab4, lab5);
9266 gimple_seq_add_stmt (end, g);
9267 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9270 tree new_var;
9271 if (TREE_TYPE (ptr) == ptr_type_node)
9273 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9274 unshare_expr (byte_position (field)));
9275 seq = NULL;
9276 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9277 gimple_seq_add_seq (end, seq);
9278 tree pbool = build_pointer_type (TREE_TYPE (field));
9279 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9280 build_int_cst (pbool, 0));
9282 else
9283 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9284 build_simple_mem_ref (ptr), field, NULL_TREE);
9286 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9287 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
9288 ref = build_simple_mem_ref (ref);
9289 /* reduction(-:var) sums up the partial results, so it acts
9290 identically to reduction(+:var). */
9291 if (rcode == MINUS_EXPR)
9292 rcode = PLUS_EXPR;
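	  /* Worked example (illustrative): if two threads start their
	     private copies at the identity 0 and finish with r1 = -a and
	     r2 = -b, combining the partials with PLUS_EXPR yields
	     r1 + r2 == -(a + b), exactly what serial repeated
	     subtraction computes.  */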
9293 if (TREE_CODE (decl) == MEM_REF)
9295 tree type = TREE_TYPE (new_var);
9296 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9297 tree i = create_tmp_var (TREE_TYPE (v));
9298 tree ptype = build_pointer_type (TREE_TYPE (type));
9299 if (DECL_P (v))
9301 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9302 tree vv = create_tmp_var (TREE_TYPE (v));
9303 gimplify_assign (vv, v, start);
9304 v = vv;
9306 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9307 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9308 new_var = build_fold_addr_expr (new_var);
9309 new_var = fold_convert (ptype, new_var);
9310 ref = fold_convert (ptype, ref);
9311 tree m = create_tmp_var (ptype);
9312 gimplify_assign (m, new_var, end);
9313 new_var = m;
9314 m = create_tmp_var (ptype);
9315 gimplify_assign (m, ref, end);
9316 ref = m;
9317 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9318 tree body = create_artificial_label (UNKNOWN_LOCATION);
9319 tree endl = create_artificial_label (UNKNOWN_LOCATION);
9320 gimple_seq_add_stmt (end, gimple_build_label (body));
9321 tree priv = build_simple_mem_ref (new_var);
9322 tree out = build_simple_mem_ref (ref);
9323 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9325 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9326 tree decl_placeholder
9327 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9328 tree lab6 = NULL_TREE;
9329 if (cancellable)
9331 /* If this reduction needs destruction and parallel
9332 has been cancelled, jump around the merge operation
9333 to the destruction. */
9334 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9335 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9336 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9337 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9338 lab6, lab5);
9339 gimple_seq_add_stmt (end, g);
9340 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9342 SET_DECL_VALUE_EXPR (placeholder, out);
9343 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9344 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9345 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9346 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9347 gimple_seq_add_seq (end,
9348 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9349 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9350 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9352 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9353 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9355 if (cancellable)
9356 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9357 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9358 if (x)
9360 gimple_seq tseq = NULL;
9361 gimplify_stmt (&x, &tseq);
9362 gimple_seq_add_seq (end, tseq);
9365 else
9367 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9368 out = unshare_expr (out);
9369 gimplify_assign (out, x, end);
9371 gimple *g
9372 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9373 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9374 gimple_seq_add_stmt (end, g);
9375 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9376 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9377 gimple_seq_add_stmt (end, g);
9378 g = gimple_build_assign (i, PLUS_EXPR, i,
9379 build_int_cst (TREE_TYPE (i), 1));
9380 gimple_seq_add_stmt (end, g);
9381 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9382 gimple_seq_add_stmt (end, g);
9383 gimple_seq_add_stmt (end, gimple_build_label (endl));
9385 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9387 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9388 tree oldv = NULL_TREE;
9389 tree lab6 = NULL_TREE;
9390 if (cancellable)
9392 /* If this reduction needs destruction and parallel
9393 has been cancelled, jump around the merge operation
9394 to the destruction. */
9395 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9396 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9397 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9398 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9399 lab6, lab5);
9400 gimple_seq_add_stmt (end, g);
9401 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9403 if (omp_is_reference (decl)
9404 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9405 TREE_TYPE (ref)))
9406 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9407 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9408 tree refv = create_tmp_var (TREE_TYPE (ref));
9409 gimplify_assign (refv, ref, end);
9410 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9411 SET_DECL_VALUE_EXPR (placeholder, ref);
9412 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9413 tree d = maybe_lookup_decl (decl, ctx);
9414 gcc_assert (d);
9415 if (DECL_HAS_VALUE_EXPR_P (d))
9416 oldv = DECL_VALUE_EXPR (d);
9417 if (omp_is_reference (var))
9419 tree v = fold_convert (TREE_TYPE (d),
9420 build_fold_addr_expr (new_var));
9421 SET_DECL_VALUE_EXPR (d, v);
9423 else
9424 SET_DECL_VALUE_EXPR (d, new_var);
9425 DECL_HAS_VALUE_EXPR_P (d) = 1;
9426 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9427 if (oldv)
9428 SET_DECL_VALUE_EXPR (d, oldv);
9429 else
9431 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9432 DECL_HAS_VALUE_EXPR_P (d) = 0;
9434 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9435 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9436 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9437 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9438 if (cancellable)
9439 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9440 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9441 if (x)
9443 gimple_seq tseq = NULL;
9444 gimplify_stmt (&x, &tseq);
9445 gimple_seq_add_seq (end, tseq);
9448 else
9450 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9451 ref = unshare_expr (ref);
9452 gimplify_assign (ref, x, end);
9454 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9455 ++cnt;
9456 field = DECL_CHAIN (bfield);
9460 if (code == OMP_TASKGROUP)
9462 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9463 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9464 gimple_seq_add_stmt (start, g);
9466 else
9468 tree c;
9469 if (code == OMP_FOR)
9470 c = gimple_omp_for_clauses (ctx->stmt);
9471 else if (code == OMP_SECTIONS)
9472 c = gimple_omp_sections_clauses (ctx->stmt);
9473 else if (code == OMP_SCOPE)
9474 c = gimple_omp_scope_clauses (ctx->stmt);
9475 else
9476 c = gimple_omp_taskreg_clauses (ctx->stmt);
9477 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9478 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9479 build_fold_addr_expr (avar));
9480 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9483 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9484 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9485 size_one_node));
9486 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9487 gimple_seq_add_stmt (end, g);
9488 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9489 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9491 enum built_in_function bfn
9492 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9493 t = builtin_decl_explicit (bfn);
9494 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9495 tree arg;
9496 if (cancellable)
9498 arg = create_tmp_var (c_bool_type);
9499 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9500 cancellable));
9502 else
9503 arg = build_int_cst (c_bool_type, 0);
9504 g = gimple_build_call (t, 1, arg);
9506 else
9508 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9509 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9511 gimple_seq_add_stmt (end, g);
9512 if (lab7)
9513 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9514 t = build_constructor (atype, NULL);
9515 TREE_THIS_VOLATILE (t) = 1;
9516 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9519 /* Expand code for an OpenMP taskgroup directive. */
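/* As an illustrative sketch (the matching end call is only materialized
   later, when the GIMPLE_OMP_RETURN emitted below is expanded), a plain

     #pragma omp taskgroup
       body;

   is lowered to approximately

     GOMP_taskgroup_start ();
     body;
     GOMP_taskgroup_end ();

   with any task_reduction clauses handled by lower_omp_task_reductions.  */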
9521 static void
9522 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9524 gimple *stmt = gsi_stmt (*gsi_p);
9525 gcall *x;
9526 gbind *bind;
9527 gimple_seq dseq = NULL;
9528 tree block = make_node (BLOCK);
9530 bind = gimple_build_bind (NULL, NULL, block);
9531 gsi_replace (gsi_p, bind, true);
9532 gimple_bind_add_stmt (bind, stmt);
9534 push_gimplify_context ();
9536 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9538 gimple_bind_add_stmt (bind, x);
9540 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9541 gimple_omp_taskgroup_clauses (stmt),
9542 gimple_bind_body_ptr (bind), &dseq);
9544 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9545 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9546 gimple_omp_set_body (stmt, NULL);
9548 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9549 gimple_bind_add_seq (bind, dseq);
9551 pop_gimplify_context (bind);
9553 gimple_bind_append_vars (bind, ctx->block_vars);
9554 BLOCK_VARS (block) = ctx->block_vars;
9558 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9560 static void
9561 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9562 omp_context *ctx)
9564 struct omp_for_data fd;
9565 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9566 return;
9568 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9569 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9570 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9571 if (!fd.ordered)
9572 return;
9574 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9575 tree c = gimple_omp_ordered_clauses (ord_stmt);
9576 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9577 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9579 /* Merge depend clauses from multiple adjacent
9580 #pragma omp ordered depend(sink:...) constructs
9581 into one #pragma omp ordered depend(sink:...), so that
9582 we can optimize them together. */
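	 /* For example, the two adjacent constructs

	      #pragma omp ordered depend(sink: i-1, j)
	      #pragma omp ordered depend(sink: i, j-1)

	    end up as the single construct

	      #pragma omp ordered depend(sink: i-1, j) depend(sink: i, j-1)

	    whose clauses the folding below can then consider together.  */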
9583 gimple_stmt_iterator gsi = *gsi_p;
9584 gsi_next (&gsi);
9585 while (!gsi_end_p (gsi))
9587 gimple *stmt = gsi_stmt (gsi);
9588 if (is_gimple_debug (stmt)
9589 || gimple_code (stmt) == GIMPLE_NOP)
9591 gsi_next (&gsi);
9592 continue;
9594 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9595 break;
9596 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9597 c = gimple_omp_ordered_clauses (ord_stmt2);
9598 if (c == NULL_TREE
9599 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
9600 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9601 break;
9602 while (*list_p)
9603 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9604 *list_p = c;
9605 gsi_remove (&gsi, true);
9609 /* Canonicalize sink dependence clauses into one folded clause if
9610 possible.
9612 The basic algorithm is to create a sink vector whose first
9613 element is the GCD of all the first elements, and whose remaining
9614 elements are the minimum of the subsequent columns.
9616 We ignore dependence vectors whose first element is zero because
9617 such dependencies are known to be executed by the same thread.
9619 We take into account the direction of the loop, so a minimum
9620 becomes a maximum if the loop is iterating forwards. We also
9621 ignore sink clauses where the loop direction is unknown, or where
9622 the offsets are clearly invalid because they are not a multiple
9623 of the loop increment.
9625 For example:
9627 #pragma omp for ordered(2)
9628 for (i=0; i < N; ++i)
9629 for (j=0; j < M; ++j)
9631 #pragma omp ordered \
9632 depend(sink:i-8,j-2) \
9633 depend(sink:i,j-1) \ // Completely ignored because the i offset is 0.
9634 depend(sink:i-4,j-3) \
9635 depend(sink:i-6,j-4)
9636 #pragma omp ordered depend(source)
9639 Folded clause is:
9641 depend(sink:-gcd(8,4,6),-min(2,3,4))
9642 -or-
9643 depend(sink:-2,-2)
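/* A stand-alone sketch of that folding (illustrative only: hypothetical
   names, offsets assumed already negated and validated as described
   above, and the loop-direction, signedness and lexicographic-ordering
   handling of the real code below omitted):

     static long
     gcd (long a, long b)
     {
       while (b)
	 {
	   long t = a % b;
	   a = b;
	   b = t;
	 }
       return a;
     }

     // Fold N sink vectors of LEN absolute offsets each into FOLDED:
     // GCD of the first column, minimum of each later column, skipping
     // vectors whose first offset is 0.
     static void
     fold_sink_vectors (const long (*vecs)[8], int n, int len, long *folded)
     {
       int seen = 0;
       for (int i = 0; i < n; i++)
	 {
	   if (vecs[i][0] == 0)
	     continue;
	   if (!seen)
	     for (int j = 0; j < len; j++)
	       folded[j] = vecs[i][j];
	   else
	     {
	       folded[0] = gcd (folded[0], vecs[i][0]);
	       for (int j = 1; j < len; j++)
		 if (vecs[i][j] < folded[j])
		   folded[j] = vecs[i][j];
	     }
	   seen = 1;
	 }
     }

   For the example above this yields { 2, 2 }, i.e. the
   depend(sink:-2,-2) shown after negation.  */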
9646 /* FIXME: Computing GCDs where the first element is zero is
9647 non-trivial in the presence of collapsed loops. Do this later. */
9648 if (fd.collapse > 1)
9649 return;
9651 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9653 /* wide_int is not a POD so it must be default-constructed. */
9654 for (unsigned i = 0; i != 2 * len - 1; ++i)
9655 new (static_cast<void*>(folded_deps + i)) wide_int ();
9657 tree folded_dep = NULL_TREE;
9658 /* TRUE if the first dimension's offset is negative. */
9659 bool neg_offset_p = false;
9661 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9662 unsigned int i;
9663 while ((c = *list_p) != NULL)
9665 bool remove = false;
9667 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
9668 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9669 goto next_ordered_clause;
9671 tree vec;
9672 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9673 vec && TREE_CODE (vec) == TREE_LIST;
9674 vec = TREE_CHAIN (vec), ++i)
9676 gcc_assert (i < len);
9678 /* omp_extract_for_data has canonicalized the condition. */
9679 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9680 || fd.loops[i].cond_code == GT_EXPR);
9681 bool forward = fd.loops[i].cond_code == LT_EXPR;
9682 bool maybe_lexically_later = true;
9684 /* While the committee makes up its mind, bail if we have any
9685 non-constant steps. */
9686 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9687 goto lower_omp_ordered_ret;
9689 tree itype = TREE_TYPE (TREE_VALUE (vec));
9690 if (POINTER_TYPE_P (itype))
9691 itype = sizetype;
9692 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9693 TYPE_PRECISION (itype),
9694 TYPE_SIGN (itype));
9696 /* Ignore invalid offsets that are not multiples of the step. */
9697 if (!wi::multiple_of_p (wi::abs (offset),
9698 wi::abs (wi::to_wide (fd.loops[i].step)),
9699 UNSIGNED))
9701 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9702 "ignoring sink clause with offset that is not "
9703 "a multiple of the loop step");
9704 remove = true;
9705 goto next_ordered_clause;
9708 /* Calculate the first dimension. The first dimension of
9709 the folded dependency vector is the GCD of the first
9710 elements, while ignoring any first elements whose offset
9711 is 0. */
9712 if (i == 0)
9714 /* Ignore dependence vectors whose first dimension is 0. */
9715 if (offset == 0)
9717 remove = true;
9718 goto next_ordered_clause;
9720 else
9722 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9724 error_at (OMP_CLAUSE_LOCATION (c),
9725 "first offset must be in opposite direction "
9726 "of loop iterations");
9727 goto lower_omp_ordered_ret;
9729 if (forward)
9730 offset = -offset;
9731 neg_offset_p = forward;
9732 /* Initialize the first time around. */
9733 if (folded_dep == NULL_TREE)
9735 folded_dep = c;
9736 folded_deps[0] = offset;
9738 else
9739 folded_deps[0] = wi::gcd (folded_deps[0],
9740 offset, UNSIGNED);
9743 /* Calculate minimum for the remaining dimensions. */
9744 else
9746 folded_deps[len + i - 1] = offset;
9747 if (folded_dep == c)
9748 folded_deps[i] = offset;
9749 else if (maybe_lexically_later
9750 && !wi::eq_p (folded_deps[i], offset))
9752 if (forward ^ wi::gts_p (folded_deps[i], offset))
9754 unsigned int j;
9755 folded_dep = c;
9756 for (j = 1; j <= i; j++)
9757 folded_deps[j] = folded_deps[len + j - 1];
9759 else
9760 maybe_lexically_later = false;
9764 gcc_assert (i == len);
9766 remove = true;
9768 next_ordered_clause:
9769 if (remove)
9770 *list_p = OMP_CLAUSE_CHAIN (c);
9771 else
9772 list_p = &OMP_CLAUSE_CHAIN (c);
9775 if (folded_dep)
9777 if (neg_offset_p)
9778 folded_deps[0] = -folded_deps[0];
9780 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9781 if (POINTER_TYPE_P (itype))
9782 itype = sizetype;
9784 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9785 = wide_int_to_tree (itype, folded_deps[0]);
9786 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9787 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9790 lower_omp_ordered_ret:
9792 /* Ordered without clauses is #pragma omp ordered threads, while we want
9793 a nop instead if we remove all clauses. */
9794 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9795 gsi_replace (gsi_p, gimple_build_nop (), true);
9799 /* Expand code for an OpenMP ordered directive. */
9801 static void
9802 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9804 tree block;
9805 gimple *stmt = gsi_stmt (*gsi_p), *g;
9806 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9807 gcall *x;
9808 gbind *bind;
9809 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9810 OMP_CLAUSE_SIMD);
9811 /* FIXME: this should check the presence of OMP_CLAUSE__SIMT_ on the enclosing
9812 loop. */
9813 bool maybe_simt
9814 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9815 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9816 OMP_CLAUSE_THREADS);
9818 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9819 OMP_CLAUSE_DEPEND))
9821 /* FIXME: This needs to be moved to the expansion phase to verify various
9822 conditions only testable on a CFG with dominators computed; also, all
9823 the depend clauses to be merged might still need to be available for
9824 the runtime checks. */
9825 if (0)
9826 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9827 return;
9830 push_gimplify_context ();
9832 block = make_node (BLOCK);
9833 bind = gimple_build_bind (NULL, NULL, block);
9834 gsi_replace (gsi_p, bind, true);
9835 gimple_bind_add_stmt (bind, stmt);
9837 if (simd)
9839 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9840 build_int_cst (NULL_TREE, threads));
9841 cfun->has_simduid_loops = true;
9843 else
9844 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9846 gimple_bind_add_stmt (bind, x);
9848 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9849 if (maybe_simt)
9851 counter = create_tmp_var (integer_type_node);
9852 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9853 gimple_call_set_lhs (g, counter);
9854 gimple_bind_add_stmt (bind, g);
9856 body = create_artificial_label (UNKNOWN_LOCATION);
9857 test = create_artificial_label (UNKNOWN_LOCATION);
9858 gimple_bind_add_stmt (bind, gimple_build_label (body));
9860 tree simt_pred = create_tmp_var (integer_type_node);
9861 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9862 gimple_call_set_lhs (g, simt_pred);
9863 gimple_bind_add_stmt (bind, g);
9865 tree t = create_artificial_label (UNKNOWN_LOCATION);
9866 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9867 gimple_bind_add_stmt (bind, g);
9869 gimple_bind_add_stmt (bind, gimple_build_label (t));
9871 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9872 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9873 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9874 gimple_omp_set_body (stmt, NULL);
9876 if (maybe_simt)
9878 gimple_bind_add_stmt (bind, gimple_build_label (test));
9879 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9880 gimple_bind_add_stmt (bind, g);
9882 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9883 tree nonneg = create_tmp_var (integer_type_node);
9884 gimple_seq tseq = NULL;
9885 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9886 gimple_bind_add_seq (bind, tseq);
9888 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9889 gimple_call_set_lhs (g, nonneg);
9890 gimple_bind_add_stmt (bind, g);
9892 tree end = create_artificial_label (UNKNOWN_LOCATION);
9893 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9894 gimple_bind_add_stmt (bind, g);
9896 gimple_bind_add_stmt (bind, gimple_build_label (end));
9898 if (simd)
9899 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9900 build_int_cst (NULL_TREE, threads));
9901 else
9902 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9904 gimple_bind_add_stmt (bind, x);
9906 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9908 pop_gimplify_context (bind);
9910 gimple_bind_append_vars (bind, ctx->block_vars);
9911 BLOCK_VARS (block) = gimple_bind_vars (bind);
9915 /* Expand code for an OpenMP scan directive and the structured block
9916 before the scan directive. */
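/* For reference (an illustrative sketch), in

     #pragma omp simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
	 r += a[i];
	 #pragma omp scan inclusive(r)
	 b[i] = r;
       }

   the statements before the scan directive form the input phase and the
   ones after it the scan phase; with exclusive(r) instead, the scan
   phase would observe the prefix sum not including the current
   iteration.  */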
9918 static void
9919 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9921 gimple *stmt = gsi_stmt (*gsi_p);
9922 bool has_clauses
9923 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9924 tree lane = NULL_TREE;
9925 gimple_seq before = NULL;
9926 omp_context *octx = ctx->outer;
9927 gcc_assert (octx);
9928 if (octx->scan_exclusive && !has_clauses)
9930 gimple_stmt_iterator gsi2 = *gsi_p;
9931 gsi_next (&gsi2);
9932 gimple *stmt2 = gsi_stmt (gsi2);
9933 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9934 with the following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9935 the one with exclusive clause(s), comes first. */
9936 if (stmt2
9937 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9938 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9940 gsi_remove (gsi_p, false);
9941 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
9942 ctx = maybe_lookup_ctx (stmt2);
9943 gcc_assert (ctx);
9944 lower_omp_scan (gsi_p, ctx);
9945 return;
9949 bool input_phase = has_clauses ^ octx->scan_inclusive;
9950 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9951 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
9952 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9953 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9954 && !gimple_omp_for_combined_p (octx->stmt));
9955 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9956 if (is_for_simd && octx->for_simd_scan_phase)
9957 is_simd = false;
9958 if (is_simd)
9959 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
9960 OMP_CLAUSE__SIMDUID_))
9962 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
9963 lane = create_tmp_var (unsigned_type_node);
9964 tree t = build_int_cst (integer_type_node,
9965 input_phase ? 1
9966 : octx->scan_inclusive ? 2 : 3);
9967 gimple *g
9968 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
9969 gimple_call_set_lhs (g, lane);
9970 gimple_seq_add_stmt (&before, g);
9973 if (is_simd || is_for)
9975 for (tree c = gimple_omp_for_clauses (octx->stmt);
9976 c; c = OMP_CLAUSE_CHAIN (c))
9977 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9978 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9980 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9981 tree var = OMP_CLAUSE_DECL (c);
9982 tree new_var = lookup_decl (var, octx);
9983 tree val = new_var;
9984 tree var2 = NULL_TREE;
9985 tree var3 = NULL_TREE;
9986 tree var4 = NULL_TREE;
9987 tree lane0 = NULL_TREE;
9988 tree new_vard = new_var;
9989 if (omp_is_reference (var))
9991 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9992 val = new_var;
9994 if (DECL_HAS_VALUE_EXPR_P (new_vard))
9996 val = DECL_VALUE_EXPR (new_vard);
9997 if (new_vard != new_var)
9999 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
10000 val = TREE_OPERAND (val, 0);
10002 if (TREE_CODE (val) == ARRAY_REF
10003 && VAR_P (TREE_OPERAND (val, 0)))
10005 tree v = TREE_OPERAND (val, 0);
10006 if (lookup_attribute ("omp simd array",
10007 DECL_ATTRIBUTES (v)))
10009 val = unshare_expr (val);
10010 lane0 = TREE_OPERAND (val, 1);
10011 TREE_OPERAND (val, 1) = lane;
10012 var2 = lookup_decl (v, octx);
10013 if (octx->scan_exclusive)
10014 var4 = lookup_decl (var2, octx);
10015 if (input_phase
10016 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10017 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
10018 if (!input_phase)
10020 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
10021 var2, lane, NULL_TREE, NULL_TREE);
10022 TREE_THIS_NOTRAP (var2) = 1;
10023 if (octx->scan_exclusive)
10025 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
10026 var4, lane, NULL_TREE,
10027 NULL_TREE);
10028 TREE_THIS_NOTRAP (var4) = 1;
10031 else
10032 var2 = val;
10035 gcc_assert (var2);
10037 else
10039 var2 = build_outer_var_ref (var, octx);
10040 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10042 var3 = maybe_lookup_decl (new_vard, octx);
10043 if (var3 == new_vard || var3 == NULL_TREE)
10044 var3 = NULL_TREE;
10045 else if (is_simd && octx->scan_exclusive && !input_phase)
10047 var4 = maybe_lookup_decl (var3, octx);
10048 if (var4 == var3 || var4 == NULL_TREE)
10050 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
10052 var4 = var3;
10053 var3 = NULL_TREE;
10055 else
10056 var4 = NULL_TREE;
10060 if (is_simd
10061 && octx->scan_exclusive
10062 && !input_phase
10063 && var4 == NULL_TREE)
10064 var4 = create_tmp_var (TREE_TYPE (val));
10066 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10068 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10069 if (input_phase)
10071 if (var3)
10073 /* If we've added a separate identity element
10074 variable, copy it over into val. */
10075 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
10076 var3);
10077 gimplify_and_add (x, &before);
10079 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10081 /* Otherwise, assign to it the identity element. */
10082 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10083 if (is_for)
10084 tseq = copy_gimple_seq_and_replace_locals (tseq);
10085 tree ref = build_outer_var_ref (var, octx);
10086 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10087 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10088 if (x)
10090 if (new_vard != new_var)
10091 val = build_fold_addr_expr_loc (clause_loc, val);
10092 SET_DECL_VALUE_EXPR (new_vard, val);
10094 SET_DECL_VALUE_EXPR (placeholder, ref);
10095 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10096 lower_omp (&tseq, octx);
10097 if (x)
10098 SET_DECL_VALUE_EXPR (new_vard, x);
10099 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10100 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10101 gimple_seq_add_seq (&before, tseq);
10102 if (is_simd)
10103 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10106 else if (is_simd)
10108 tree x;
10109 if (octx->scan_exclusive)
10111 tree v4 = unshare_expr (var4);
10112 tree v2 = unshare_expr (var2);
10113 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
10114 gimplify_and_add (x, &before);
10116 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10117 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10118 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10119 tree vexpr = val;
10120 if (x && new_vard != new_var)
10121 vexpr = build_fold_addr_expr_loc (clause_loc, val);
10122 if (x)
10123 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10124 SET_DECL_VALUE_EXPR (placeholder, var2);
10125 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10126 lower_omp (&tseq, octx);
10127 gimple_seq_add_seq (&before, tseq);
10128 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10129 if (x)
10130 SET_DECL_VALUE_EXPR (new_vard, x);
10131 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10132 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10133 if (octx->scan_inclusive)
10135 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10136 var2);
10137 gimplify_and_add (x, &before);
10139 else if (lane0 == NULL_TREE)
10141 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10142 var4);
10143 gimplify_and_add (x, &before);
10147 else
10149 if (input_phase)
10151 /* input phase. Set val to initializer before
10152 the body. */
10153 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
10154 gimplify_assign (val, x, &before);
10156 else if (is_simd)
10158 /* scan phase. */
10159 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10160 if (code == MINUS_EXPR)
10161 code = PLUS_EXPR;
10163 tree x = build2 (code, TREE_TYPE (var2),
10164 unshare_expr (var2), unshare_expr (val));
10165 if (octx->scan_inclusive)
10167 gimplify_assign (unshare_expr (var2), x, &before);
10168 gimplify_assign (val, var2, &before);
10170 else
10172 gimplify_assign (unshare_expr (var4),
10173 unshare_expr (var2), &before);
10174 gimplify_assign (var2, x, &before);
10175 if (lane0 == NULL_TREE)
10176 gimplify_assign (val, var4, &before);
10180 if (octx->scan_exclusive && !input_phase && lane0)
10182 tree vexpr = unshare_expr (var4);
10183 TREE_OPERAND (vexpr, 1) = lane0;
10184 if (new_vard != new_var)
10185 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
10186 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10190 if (is_simd && !is_for_simd)
10192 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
10193 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
10194 gsi_replace (gsi_p, gimple_build_nop (), true);
10195 return;
10197 lower_omp (gimple_omp_body_ptr (stmt), octx);
10198 if (before)
10200 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
10201 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
10206 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10207 substitution of a couple of function calls. The NAMED case, however,
10208 requires that the languages coordinate on a symbol name. It is therefore
10209 best put here in common code. */
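/* Concretely, a sketch of the result using the libgomp entry points
   referenced below:

     #pragma omp critical
       body;

   becomes

     GOMP_critical_start ();
     body;
     GOMP_critical_end ();

   while the named variant

     #pragma omp critical (foo)
       body;

   locks a mutex symbol whose name all languages agree on:

     GOMP_critical_name_start (&.gomp_critical_user_foo);
     body;
     GOMP_critical_name_end (&.gomp_critical_user_foo);  */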
10211 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
10213 static void
10214 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10216 tree block;
10217 tree name, lock, unlock;
10218 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10219 gbind *bind;
10220 location_t loc = gimple_location (stmt);
10221 gimple_seq tbody;
10223 name = gimple_omp_critical_name (stmt);
10224 if (name)
10226 tree decl;
10228 if (!critical_name_mutexes)
10229 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10231 tree *n = critical_name_mutexes->get (name);
10232 if (n == NULL)
10234 char *new_str;
10236 decl = create_tmp_var_raw (ptr_type_node);
10238 new_str = ACONCAT ((".gomp_critical_user_",
10239 IDENTIFIER_POINTER (name), NULL));
10240 DECL_NAME (decl) = get_identifier (new_str);
10241 TREE_PUBLIC (decl) = 1;
10242 TREE_STATIC (decl) = 1;
10243 DECL_COMMON (decl) = 1;
10244 DECL_ARTIFICIAL (decl) = 1;
10245 DECL_IGNORED_P (decl) = 1;
10247 varpool_node::finalize_decl (decl);
10249 critical_name_mutexes->put (name, decl);
10251 else
10252 decl = *n;
10254 /* If '#pragma omp critical' is inside an offloaded region or
10255 inside a function marked as offloadable, the symbol must be
10256 marked as offloadable too. */
10257 omp_context *octx;
10258 if (cgraph_node::get (current_function_decl)->offloadable)
10259 varpool_node::get_create (decl)->offloadable = 1;
10260 else
10261 for (octx = ctx->outer; octx; octx = octx->outer)
10262 if (is_gimple_omp_offloaded (octx->stmt))
10264 varpool_node::get_create (decl)->offloadable = 1;
10265 break;
10268 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10269 lock = build_call_expr_loc (loc, lock, 1,
10270 build_fold_addr_expr_loc (loc, decl));
10272 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10273 unlock = build_call_expr_loc (loc, unlock, 1,
10274 build_fold_addr_expr_loc (loc, decl));
10276 else
10278 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10279 lock = build_call_expr_loc (loc, lock, 0);
10281 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10282 unlock = build_call_expr_loc (loc, unlock, 0);
10285 push_gimplify_context ();
10287 block = make_node (BLOCK);
10288 bind = gimple_build_bind (NULL, NULL, block);
10289 gsi_replace (gsi_p, bind, true);
10290 gimple_bind_add_stmt (bind, stmt);
10292 tbody = gimple_bind_body (bind);
10293 gimplify_and_add (lock, &tbody);
10294 gimple_bind_set_body (bind, tbody);
10296 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10297 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10298 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10299 gimple_omp_set_body (stmt, NULL);
10301 tbody = gimple_bind_body (bind);
10302 gimplify_and_add (unlock, &tbody);
10303 gimple_bind_set_body (bind, tbody);
10305 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10307 pop_gimplify_context (bind);
10308 gimple_bind_append_vars (bind, ctx->block_vars);
10309 BLOCK_VARS (block) = gimple_bind_vars (bind);
10312 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10313 for a lastprivate clause. Given a loop control predicate of (V
10314 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10315 is appended to *DLIST, iterator initialization is appended to
10316 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10317 to be emitted in a critical section. */
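/* E.g., for

     #pragma omp for lastprivate(x)
     for (i = 0; i < n; i++)
       x = f (i);

   the copy-out is guarded roughly as

     if (i >= n)          // or i == n when the step is known to be +/-1
       x_orig = x_priv;   // hypothetical names for the two copies

   so that only the thread which executed the sequentially last
   iteration stores its private value back.  */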
10319 static void
10320 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10321 gimple_seq *dlist, gimple_seq *clist,
10322 struct omp_context *ctx)
10324 tree clauses, cond, vinit;
10325 enum tree_code cond_code;
10326 gimple_seq stmts;
10328 cond_code = fd->loop.cond_code;
10329 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10331 /* When possible, use a strict equality expression. This can let VRP
10332 type optimizations deduce the value and remove a copy. */
10333 if (tree_fits_shwi_p (fd->loop.step))
10335 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10336 if (step == 1 || step == -1)
10337 cond_code = EQ_EXPR;
10340 tree n2 = fd->loop.n2;
10341 if (fd->collapse > 1
10342 && TREE_CODE (n2) != INTEGER_CST
10343 && gimple_omp_for_combined_into_p (fd->for_stmt))
10345 struct omp_context *taskreg_ctx = NULL;
10346 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10348 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10349 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10350 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10352 if (gimple_omp_for_combined_into_p (gfor))
10354 gcc_assert (ctx->outer->outer
10355 && is_parallel_ctx (ctx->outer->outer));
10356 taskreg_ctx = ctx->outer->outer;
10358 else
10360 struct omp_for_data outer_fd;
10361 omp_extract_for_data (gfor, &outer_fd, NULL);
10362 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10365 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10366 taskreg_ctx = ctx->outer->outer;
10368 else if (is_taskreg_ctx (ctx->outer))
10369 taskreg_ctx = ctx->outer;
10370 if (taskreg_ctx)
10372 int i;
10373 tree taskreg_clauses
10374 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10375 tree innerc = omp_find_clause (taskreg_clauses,
10376 OMP_CLAUSE__LOOPTEMP_);
10377 gcc_assert (innerc);
10378 int count = fd->collapse;
10379 if (fd->non_rect
10380 && fd->last_nonrect == fd->first_nonrect + 1)
10381 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10382 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10383 count += 4;
10384 for (i = 0; i < count; i++)
10386 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10387 OMP_CLAUSE__LOOPTEMP_);
10388 gcc_assert (innerc);
10390 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10391 OMP_CLAUSE__LOOPTEMP_);
10392 if (innerc)
10393 n2 = fold_convert (TREE_TYPE (n2),
10394 lookup_decl (OMP_CLAUSE_DECL (innerc),
10395 taskreg_ctx));
10398 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10400 clauses = gimple_omp_for_clauses (fd->for_stmt);
10401 stmts = NULL;
10402 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
10403 if (!gimple_seq_empty_p (stmts))
10405 gimple_seq_add_seq (&stmts, *dlist);
10406 *dlist = stmts;
10408 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10409 vinit = fd->loop.n1;
10410 if (cond_code == EQ_EXPR
10411 && tree_fits_shwi_p (fd->loop.n2)
10412 && ! integer_zerop (fd->loop.n2))
10413 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10414 else
10415 vinit = unshare_expr (vinit);
10417 /* Initialize the iterator variable, so that threads that don't execute
10418 any iterations don't execute the lastprivate clauses by accident. */
10419 gimplify_assign (fd->loop.v, vinit, body_p);
10423 /* OpenACC privatization.
10425 Or, in other words, *sharing* at the respective OpenACC level of
10426 parallelism.
10428 From a correctness perspective, a non-addressable variable can't be accessed
10429 outside the current thread, so it can go in a (faster than shared memory)
10430 register -- though that register may need to be broadcast in some
10431 circumstances. A variable can only meaningfully be "shared" across workers
10432 or vector lanes if its address is taken, e.g. by a call to an atomic
10433 builtin.
10435 From an optimization perspective, the answer might be fuzzier: maybe
10436 sometimes, using shared memory directly would be faster than
10437 broadcasting. */
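/* For instance (an illustrative sketch), in

     #pragma acc parallel loop
     for (i = 0; i < n; i++)
       {
	 int tmp = a[i];
	 b[i] = tmp * 2;
       }

   tmp is never address-taken and is a candidate for living in a
   per-lane register, whereas a variable whose address escapes (say, one
   passed to an atomic builtin) must remain addressable so that workers
   or vector lanes can observe one common object.  */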
10439 static void
10440 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10441 const location_t loc, const tree c,
10442 const tree decl)
10444 const dump_user_location_t d_u_loc
10445 = dump_user_location_t::from_location_t (loc);
10446 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10447 #if __GNUC__ >= 10
10448 # pragma GCC diagnostic push
10449 # pragma GCC diagnostic ignored "-Wformat"
10450 #endif
10451 dump_printf_loc (l_dump_flags, d_u_loc,
10452 "variable %<%T%> ", decl);
10453 #if __GNUC__ >= 10
10454 # pragma GCC diagnostic pop
10455 #endif
10456 if (c)
10457 dump_printf (l_dump_flags,
10458 "in %qs clause ",
10459 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10460 else
10461 dump_printf (l_dump_flags,
10462 "declared in block ");
10465 static bool
10466 oacc_privatization_candidate_p (const location_t loc, const tree c,
10467 const tree decl)
10469 dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10471 /* There is some differentiation depending on block vs. clause. */
10472 bool block = !c;
10474 bool res = true;
10476 if (res && !VAR_P (decl))
10478 res = false;
10480 if (dump_enabled_p ())
10482 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10483 dump_printf (l_dump_flags,
10484 "potentially has improper OpenACC privatization level: %qs\n",
10485 get_tree_code_name (TREE_CODE (decl)));
10489 if (res && block && TREE_STATIC (decl))
10491 res = false;
10493 if (dump_enabled_p ())
10495 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10496 dump_printf (l_dump_flags,
10497 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10498 "static");
10502 if (res && block && DECL_EXTERNAL (decl))
10504 res = false;
10506 if (dump_enabled_p ())
10508 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10509 dump_printf (l_dump_flags,
10510 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10511 "external");
10515 if (res && !TREE_ADDRESSABLE (decl))
10517 res = false;
10519 if (dump_enabled_p ())
10521 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10522 dump_printf (l_dump_flags,
10523 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10524 "not addressable");
10528 if (res)
10530 if (dump_enabled_p ())
10532 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10533 dump_printf (l_dump_flags,
10534 "is candidate for adjusting OpenACC privatization level\n");
10538 if (dump_file && (dump_flags & TDF_DETAILS))
10540 print_generic_decl (dump_file, decl, dump_flags);
10541 fprintf (dump_file, "\n");
10544 return res;
10547 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10548 CTX. */
10550 static void
10551 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10553 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10554 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10556 tree decl = OMP_CLAUSE_DECL (c);
10558 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c, decl))
10559 continue;
10561 gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10562 ctx->oacc_privatization_candidates.safe_push (decl);
10566 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10567 CTX. */
10569 static void
10570 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10572 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10574 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL, decl))
10575 continue;
10577 gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10578 ctx->oacc_privatization_candidates.safe_push (decl);
10582 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10584 static tree
10585 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10586 struct walk_stmt_info *wi)
10588 gimple *stmt = gsi_stmt (*gsi_p);
10590 *handled_ops_p = true;
10591 switch (gimple_code (stmt))
10593 WALK_SUBSTMTS;
10595 case GIMPLE_OMP_FOR:
10596 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10597 && gimple_omp_for_combined_into_p (stmt))
10598 *handled_ops_p = false;
10599 break;
10601 case GIMPLE_OMP_SCAN:
10602 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10603 return integer_zero_node;
10604 default:
10605 break;
10607 return NULL;
10610 /* Helper function for lower_omp_for: add transformations for a worksharing
10611 loop with scan directives inside it.
10612 For worksharing loop not combined with simd, transform:
10613 #pragma omp for reduction(inscan,+:r) private(i)
10614 for (i = 0; i < n; i = i + 1)
10617 update (r);
10619 #pragma omp scan inclusive(r)
10621 use (r);
10625 into two worksharing loops + code to merge results:
10627 num_threads = omp_get_num_threads ();
10628 thread_num = omp_get_thread_num ();
10629 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10630 <D.2099>:
10631 var2 = r;
10632 goto <D.2101>;
10633 <D.2100>:
10634 // For UDRs this is UDR init, or if ctors are needed, copy from
10635 // var3 that has been constructed to contain the neutral element.
10636 var2 = 0;
10637 <D.2101>:
10638 ivar = 0;
10639 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10640 // a shared array with num_threads elements and rprivb to a local array
10641 // with a number of elements equal to the number of (contiguous) iterations
10642 // the current thread will perform. The controlb and controlp variables are
10643 // temporaries to handle deallocation of rprivb at the end of the second
10644 // GOMP_FOR.
10645 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10646 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10647 for (i = 0; i < n; i = i + 1)
10650 // For UDRs this is UDR init or copy from var3.
10651 r = 0;
10652 // This is the input phase from user code.
10653 update (r);
10656 // For UDRs this is UDR merge.
10657 var2 = var2 + r;
10658 // Rather than handing it over to the user, save it to the local thread's
10659 // array.
10660 rprivb[ivar] = var2;
10661 // For exclusive scan, the above two statements are swapped.
10662 ivar = ivar + 1;
10665 // And remember this thread's final value in the shared
10666 // rpriva array.
10667 rpriva[(sizetype) thread_num] = var2;
10668 // If there is more than one thread, compute the inclusive parallel scan
10669 // of the rpriva array using a work-efficient prefix sum.
10670 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10671 <D.2102>:
10672 GOMP_barrier ();
10673 down = 0;
10674 k = 1;
10675 num_threadsu = (unsigned int) num_threads;
10676 thread_numup1 = (unsigned int) thread_num + 1;
10677 <D.2108>:
10678 twok = k << 1;
10679 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10680 <D.2110>:
10681 down = 4294967295;
10682 k = k >> 1;
10683 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10684 <D.2112>:
10685 k = k >> 1;
10686 <D.2111>:
10687 twok = k << 1;
10688 cplx = .MUL_OVERFLOW (thread_numup1, twok);
10689 mul = REALPART_EXPR <cplx>;
10690 ovf = IMAGPART_EXPR <cplx>;
10691 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10692 <D.2116>:
10693 andv = k & down;
10694 andvm1 = andv + 4294967295;
10695 l = mul + andvm1;
10696 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10697 <D.2120>:
10698 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10699 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10700 rpriva[l] = rpriva[l - k] + rpriva[l];
10701 <D.2117>:
10702 if (down == 0) goto <D.2121>; else goto <D.2122>;
10703 <D.2121>:
10704 k = k << 1;
10705 goto <D.2123>;
10706 <D.2122>:
10707 k = k >> 1;
10708 <D.2123>:
10709 GOMP_barrier ();
10710 if (k != 0) goto <D.2108>; else goto <D.2103>;
10711 <D.2103>:
10712 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10713 <D.2124>:
10714 // For UDRs this is UDR init or copy from var3.
10715 var2 = 0;
10716 goto <D.2126>;
10717 <D.2125>:
10718 var2 = rpriva[thread_num - 1];
10719 <D.2126>:
10720 ivar = 0;
10721 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10722 reduction(inscan,+:r) private(i)
10723 for (i = 0; i < n; i = i + 1)
10726 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10727 r = var2 + rprivb[ivar];
10730 // This is the scan phase from user code.
10731 use (r);
10732 // Plus a bump of the iterator.
10733 ivar = ivar + 1;
10735 } */
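/* A stand-alone C rendering of the prefix-sum loop above (illustrative
   sketch only; nthreads, tid, a and barrier are hypothetical stand-ins
   for num_threads, thread_num, rpriva and GOMP_barrier, and the plain
   += stands for the UDR merge where one is supplied):

     barrier ();
     unsigned int k = 1, down = 0;
     do
       {
	 if ((k << 1) > nthreads)
	   {
	     down = ~0U;
	     k >>= 1;
	     if (k == nthreads)
	       k >>= 1;
	   }
	 unsigned long long mul = (unsigned long long) (tid + 1) * (k << 1);
	 if ((mul >> 32) == 0)	// the .MUL_OVERFLOW test
	   {
	     unsigned int l = (unsigned int) mul + (k & down) - 1;
	     if (l < nthreads)
	       a[l] += a[l - k];
	   }
	 k = down == 0 ? k << 1 : k >> 1;
	 barrier ();
       }
     while (k != 0);  */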
10737 static void
10738 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10739 struct omp_for_data *fd, omp_context *ctx)
10741 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10742 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10744 gimple_seq body = gimple_omp_body (stmt);
10745 gimple_stmt_iterator input1_gsi = gsi_none ();
10746 struct walk_stmt_info wi;
10747 memset (&wi, 0, sizeof (wi));
10748 wi.val_only = true;
10749 wi.info = (void *) &input1_gsi;
10750 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10751 gcc_assert (!gsi_end_p (input1_gsi));
10753 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10754 gimple_stmt_iterator gsi = input1_gsi;
10755 gsi_next (&gsi);
10756 gimple_stmt_iterator scan1_gsi = gsi;
10757 gimple *scan_stmt1 = gsi_stmt (gsi);
10758 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10760 gimple_seq input_body = gimple_omp_body (input_stmt1);
10761 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10762 gimple_omp_set_body (input_stmt1, NULL);
10763 gimple_omp_set_body (scan_stmt1, NULL);
10764 gimple_omp_set_body (stmt, NULL);
10766 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10767 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10768 gimple_omp_set_body (stmt, body);
10769 gimple_omp_set_body (input_stmt1, input_body);
10771 gimple_stmt_iterator input2_gsi = gsi_none ();
10772 memset (&wi, 0, sizeof (wi));
10773 wi.val_only = true;
10774 wi.info = (void *) &input2_gsi;
10775 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10776 gcc_assert (!gsi_end_p (input2_gsi));
10778 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10779 gsi = input2_gsi;
10780 gsi_next (&gsi);
10781 gimple_stmt_iterator scan2_gsi = gsi;
10782 gimple *scan_stmt2 = gsi_stmt (gsi);
10783 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10784 gimple_omp_set_body (scan_stmt2, scan_body);
10786 gimple_stmt_iterator input3_gsi = gsi_none ();
10787 gimple_stmt_iterator scan3_gsi = gsi_none ();
10788 gimple_stmt_iterator input4_gsi = gsi_none ();
10789 gimple_stmt_iterator scan4_gsi = gsi_none ();
10790 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10791 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10792 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10793 if (is_for_simd)
10795 memset (&wi, 0, sizeof (wi));
10796 wi.val_only = true;
10797 wi.info = (void *) &input3_gsi;
10798 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10799 gcc_assert (!gsi_end_p (input3_gsi));
10801 input_stmt3 = gsi_stmt (input3_gsi);
10802 gsi = input3_gsi;
10803 gsi_next (&gsi);
10804 scan3_gsi = gsi;
10805 scan_stmt3 = gsi_stmt (gsi);
10806 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10808 memset (&wi, 0, sizeof (wi));
10809 wi.val_only = true;
10810 wi.info = (void *) &input4_gsi;
10811 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10812 gcc_assert (!gsi_end_p (input4_gsi));
10814 input_stmt4 = gsi_stmt (input4_gsi);
10815 gsi = input4_gsi;
10816 gsi_next (&gsi);
10817 scan4_gsi = gsi;
10818 scan_stmt4 = gsi_stmt (gsi);
10819 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10821 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10822 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
10825 tree num_threads = create_tmp_var (integer_type_node);
10826 tree thread_num = create_tmp_var (integer_type_node);
10827 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10828 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10829 gimple *g = gimple_build_call (nthreads_decl, 0);
10830 gimple_call_set_lhs (g, num_threads);
10831 gimple_seq_add_stmt (body_p, g);
10832 g = gimple_build_call (threadnum_decl, 0);
10833 gimple_call_set_lhs (g, thread_num);
10834 gimple_seq_add_stmt (body_p, g);
10836 tree ivar = create_tmp_var (sizetype);
10837 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
10838 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
10839 tree k = create_tmp_var (unsigned_type_node);
10840 tree l = create_tmp_var (unsigned_type_node);
10842 gimple_seq clist = NULL, mdlist = NULL;
10843 gimple_seq thr01_list = NULL, thrn1_list = NULL;
10844 gimple_seq thr02_list = NULL, thrn2_list = NULL;
10845 gimple_seq scan1_list = NULL, input2_list = NULL;
10846 gimple_seq last_list = NULL, reduc_list = NULL;
10847 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10848 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10849 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10851 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10852 tree var = OMP_CLAUSE_DECL (c);
10853 tree new_var = lookup_decl (var, ctx);
10854 tree var3 = NULL_TREE;
10855 tree new_vard = new_var;
10856 if (omp_is_reference (var))
10857 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10858 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10860 var3 = maybe_lookup_decl (new_vard, ctx);
10861 if (var3 == new_vard)
10862 var3 = NULL_TREE;
10865 tree ptype = build_pointer_type (TREE_TYPE (new_var));
10866 tree rpriva = create_tmp_var (ptype);
10867 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10868 OMP_CLAUSE_DECL (nc) = rpriva;
10869 *cp1 = nc;
10870 cp1 = &OMP_CLAUSE_CHAIN (nc);
10872 tree rprivb = create_tmp_var (ptype);
10873 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10874 OMP_CLAUSE_DECL (nc) = rprivb;
10875 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
10876 *cp1 = nc;
10877 cp1 = &OMP_CLAUSE_CHAIN (nc);
10879 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
10880 if (new_vard != new_var)
10881 TREE_ADDRESSABLE (var2) = 1;
10882 gimple_add_tmp_var (var2);
10884 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
10885 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10886 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10887 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10888 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
10890 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
10891 thread_num, integer_minus_one_node);
10892 x = fold_convert_loc (clause_loc, sizetype, x);
10893 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10894 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10895 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10896 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
10898 x = fold_convert_loc (clause_loc, sizetype, l);
10899 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10900 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10901 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10902 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
10904 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
10905 x = fold_convert_loc (clause_loc, sizetype, x);
10906 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10907 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10908 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10909 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
10911 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
10912 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10913 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
10914 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
10916 tree var4 = is_for_simd ? new_var : var2;
10917 tree var5 = NULL_TREE, var6 = NULL_TREE;
10918 if (is_for_simd)
10920 var5 = lookup_decl (var, input_simd_ctx);
10921 var6 = lookup_decl (var, scan_simd_ctx);
10922 if (new_vard != new_var)
10924 var5 = build_simple_mem_ref_loc (clause_loc, var5);
10925 var6 = build_simple_mem_ref_loc (clause_loc, var6);
10928 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10930 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10931 tree val = var2;
10933 x = lang_hooks.decls.omp_clause_default_ctor
10934 (c, var2, build_outer_var_ref (var, ctx));
10935 if (x)
10936 gimplify_and_add (x, &clist);
10938 x = build_outer_var_ref (var, ctx);
10939 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
10941 gimplify_and_add (x, &thr01_list);
10943 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
10944 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10945 if (var3)
10947 x = unshare_expr (var4);
10948 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10949 gimplify_and_add (x, &thrn1_list);
10950 x = unshare_expr (var4);
10951 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10952 gimplify_and_add (x, &thr02_list);
10954 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10956 /* Otherwise, assign to it the identity element. */
10957 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10958 tseq = copy_gimple_seq_and_replace_locals (tseq);
10959 if (!is_for_simd)
10961 if (new_vard != new_var)
10962 val = build_fold_addr_expr_loc (clause_loc, val);
10963 SET_DECL_VALUE_EXPR (new_vard, val);
10964 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10966 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
10967 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10968 lower_omp (&tseq, ctx);
10969 gimple_seq_add_seq (&thrn1_list, tseq);
10970 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10971 lower_omp (&tseq, ctx);
10972 gimple_seq_add_seq (&thr02_list, tseq);
10973 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10974 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10975 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10976 if (y)
10977 SET_DECL_VALUE_EXPR (new_vard, y);
10978 else
10980 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10981 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10985 x = unshare_expr (var4);
10986 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
10987 gimplify_and_add (x, &thrn2_list);
10989 if (is_for_simd)
10991 x = unshare_expr (rprivb_ref);
10992 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
10993 gimplify_and_add (x, &scan1_list);
10995 else
10997 if (ctx->scan_exclusive)
10999 x = unshare_expr (rprivb_ref);
11000 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11001 gimplify_and_add (x, &scan1_list);
11004 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11005 tseq = copy_gimple_seq_and_replace_locals (tseq);
11006 SET_DECL_VALUE_EXPR (placeholder, var2);
11007 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11008 lower_omp (&tseq, ctx);
11009 gimple_seq_add_seq (&scan1_list, tseq);
11011 if (ctx->scan_inclusive)
11013 x = unshare_expr (rprivb_ref);
11014 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11015 gimplify_and_add (x, &scan1_list);
11019 x = unshare_expr (rpriva_ref);
11020 x = lang_hooks.decls.omp_clause_assign_op (c, x,
11021 unshare_expr (var4));
11022 gimplify_and_add (x, &mdlist);
11024 x = unshare_expr (is_for_simd ? var6 : new_var);
11025 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
11026 gimplify_and_add (x, &input2_list);
11028 val = rprivb_ref;
11029 if (new_vard != new_var)
11030 val = build_fold_addr_expr_loc (clause_loc, val);
11032 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11033 tseq = copy_gimple_seq_and_replace_locals (tseq);
11034 SET_DECL_VALUE_EXPR (new_vard, val);
11035 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11036 if (is_for_simd)
11038 SET_DECL_VALUE_EXPR (placeholder, var6);
11039 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11041 else
11042 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11043 lower_omp (&tseq, ctx);
11044 if (y)
11045 SET_DECL_VALUE_EXPR (new_vard, y);
11046 else
11048 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11049 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11051 if (!is_for_simd)
11053 SET_DECL_VALUE_EXPR (placeholder, new_var);
11054 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11055 lower_omp (&tseq, ctx);
11057 gimple_seq_add_seq (&input2_list, tseq);
11059 x = build_outer_var_ref (var, ctx);
11060 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
11061 gimplify_and_add (x, &last_list);
11063 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
11064 gimplify_and_add (x, &reduc_list);
11065 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11066 tseq = copy_gimple_seq_and_replace_locals (tseq);
11067 val = rprival_ref;
11068 if (new_vard != new_var)
11069 val = build_fold_addr_expr_loc (clause_loc, val);
11070 SET_DECL_VALUE_EXPR (new_vard, val);
11071 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11072 SET_DECL_VALUE_EXPR (placeholder, var2);
11073 lower_omp (&tseq, ctx);
11074 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11075 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11076 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11077 if (y)
11078 SET_DECL_VALUE_EXPR (new_vard, y);
11079 else
11081 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11082 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11084 gimple_seq_add_seq (&reduc_list, tseq);
11085 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
11086 gimplify_and_add (x, &reduc_list);
11088 x = lang_hooks.decls.omp_clause_dtor (c, var2);
11089 if (x)
11090 gimplify_and_add (x, dlist);
11092 else
11094 x = build_outer_var_ref (var, ctx);
11095 gimplify_assign (unshare_expr (var4), x, &thr01_list);
11097 x = omp_reduction_init (c, TREE_TYPE (new_var));
11098 gimplify_assign (unshare_expr (var4), unshare_expr (x),
11099 &thrn1_list);
11100 gimplify_assign (unshare_expr (var4), x, &thr02_list);
11102 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
11104 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
11105 if (code == MINUS_EXPR)
11106 code = PLUS_EXPR;
11108 if (is_for_simd)
11109 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
11110 else
11112 if (ctx->scan_exclusive)
11113 gimplify_assign (unshare_expr (rprivb_ref), var2,
11114 &scan1_list);
11115 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
11116 gimplify_assign (var2, x, &scan1_list);
11117 if (ctx->scan_inclusive)
11118 gimplify_assign (unshare_expr (rprivb_ref), var2,
11119 &scan1_list);
11122 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
11123 &mdlist);
11125 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
11126 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
11128 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
11129 &last_list);
11131 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
11132 unshare_expr (rprival_ref));
11133 gimplify_assign (rprival_ref, x, &reduc_list);
11137 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11138 gimple_seq_add_stmt (&scan1_list, g);
11139 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11140 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11141 ? scan_stmt4 : scan_stmt2), g);
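/* ivar counts iterations; it is bumped once per iteration in the body of
   each of the two loops, so that rprivb indexing matches between the
   input phase and the scan phase.  */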
11143 tree controlb = create_tmp_var (boolean_type_node);
11144 tree controlp = create_tmp_var (ptr_type_node);
11145 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11146 OMP_CLAUSE_DECL (nc) = controlb;
11147 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11148 *cp1 = nc;
11149 cp1 = &OMP_CLAUSE_CHAIN (nc);
11150 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11151 OMP_CLAUSE_DECL (nc) = controlp;
11152 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11153 *cp1 = nc;
11154 cp1 = &OMP_CLAUSE_CHAIN (nc);
11155 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11156 OMP_CLAUSE_DECL (nc) = controlb;
11157 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11158 *cp2 = nc;
11159 cp2 = &OMP_CLAUSE_CHAIN (nc);
11160 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11161 OMP_CLAUSE_DECL (nc) = controlp;
11162 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11163 *cp2 = nc;
11164 cp2 = &OMP_CLAUSE_CHAIN (nc);
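/* The controlb/controlp pair is added as _scantemp_ control clauses to
   both halves of the scan (the cp1 and cp2 chains), so that expansion
   can coordinate between the two loops how the scan temporaries are
   allocated and released.  */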
11166 *cp1 = gimple_omp_for_clauses (stmt);
11167 gimple_omp_for_set_clauses (stmt, new_clauses1);
11168 *cp2 = gimple_omp_for_clauses (new_stmt);
11169 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
11171 if (is_for_simd)
11173 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
11174 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
11176 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
11177 GSI_SAME_STMT);
11178 gsi_remove (&input3_gsi, true);
11179 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11180 GSI_SAME_STMT);
11181 gsi_remove (&scan3_gsi, true);
11182 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11183 GSI_SAME_STMT);
11184 gsi_remove (&input4_gsi, true);
11185 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11186 GSI_SAME_STMT);
11187 gsi_remove (&scan4_gsi, true);
11189 else
11191 gimple_omp_set_body (scan_stmt1, scan1_list);
11192 gimple_omp_set_body (input_stmt2, input2_list);
11195 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11196 GSI_SAME_STMT);
11197 gsi_remove (&input1_gsi, true);
11198 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11199 GSI_SAME_STMT);
11200 gsi_remove (&scan1_gsi, true);
11201 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11202 GSI_SAME_STMT);
11203 gsi_remove (&input2_gsi, true);
11204 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11205 GSI_SAME_STMT);
11206 gsi_remove (&scan2_gsi, true);
11208 gimple_seq_add_seq (body_p, clist);
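/* First per-thread initialization: thread 0 starts its running value
   from the original list item (thr01_list), every other thread from the
   initializer or identity value (thrn1_list).  */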
11210 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11211 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11212 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11213 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11214 gimple_seq_add_stmt (body_p, g);
11215 g = gimple_build_label (lab1);
11216 gimple_seq_add_stmt (body_p, g);
11217 gimple_seq_add_seq (body_p, thr01_list);
11218 g = gimple_build_goto (lab3);
11219 gimple_seq_add_stmt (body_p, g);
11220 g = gimple_build_label (lab2);
11221 gimple_seq_add_stmt (body_p, g);
11222 gimple_seq_add_seq (body_p, thrn1_list);
11223 g = gimple_build_label (lab3);
11224 gimple_seq_add_stmt (body_p, g);
11226 g = gimple_build_assign (ivar, size_zero_node);
11227 gimple_seq_add_stmt (body_p, g);
11229 gimple_seq_add_stmt (body_p, stmt);
11230 gimple_seq_add_seq (body_p, body);
11231 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11232 fd->loop.v));
11234 g = gimple_build_omp_return (true);
11235 gimple_seq_add_stmt (body_p, g);
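/* mdlist, emitted after the first loop, makes each thread publish its
   accumulated partial result: rpriva[thread_num] = var4.  */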
11236 gimple_seq_add_seq (body_p, mdlist);
11238 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11239 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11240 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11241 gimple_seq_add_stmt (body_p, g);
11242 g = gimple_build_label (lab1);
11243 gimple_seq_add_stmt (body_p, g);
11245 g = omp_build_barrier (NULL);
11246 gimple_seq_add_stmt (body_p, g);
11248 tree down = create_tmp_var (unsigned_type_node);
11249 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11250 gimple_seq_add_stmt (body_p, g);
11252 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11253 gimple_seq_add_stmt (body_p, g);
11255 tree num_threadsu = create_tmp_var (unsigned_type_node);
11256 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11257 gimple_seq_add_stmt (body_p, g);
11259 tree thread_numu = create_tmp_var (unsigned_type_node);
11260 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11261 gimple_seq_add_stmt (body_p, g);
11263 tree thread_nump1 = create_tmp_var (unsigned_type_node);
11264 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11265 build_int_cst (unsigned_type_node, 1));
11266 gimple_seq_add_stmt (body_p, g);
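/* What follows implements, in effect, a work-efficient parallel prefix
   combine over the per-thread partials in rpriva: K is the combining
   distance and DOWN selects the phase (zero during the up-sweep,
   all-ones during the down-sweep).  On each step the thread for which
   L = (thread_num + 1) * 2 * K + (K & DOWN) - 1 is a valid index
   (L < num_threads and the multiplication didn't overflow) folds
   rpriva[L - K] into rpriva[L] via reduc_list, with a barrier between
   steps.  */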
11268 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11269 g = gimple_build_label (lab3);
11270 gimple_seq_add_stmt (body_p, g);
11272 tree twok = create_tmp_var (unsigned_type_node);
11273 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11274 gimple_seq_add_stmt (body_p, g);
11276 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11277 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11278 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11279 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11280 gimple_seq_add_stmt (body_p, g);
11281 g = gimple_build_label (lab4);
11282 gimple_seq_add_stmt (body_p, g);
11283 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11284 gimple_seq_add_stmt (body_p, g);
11285 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11286 gimple_seq_add_stmt (body_p, g);
11288 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11289 gimple_seq_add_stmt (body_p, g);
11290 g = gimple_build_label (lab6);
11291 gimple_seq_add_stmt (body_p, g);
11293 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11294 gimple_seq_add_stmt (body_p, g);
11296 g = gimple_build_label (lab5);
11297 gimple_seq_add_stmt (body_p, g);
11299 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11300 gimple_seq_add_stmt (body_p, g);
11302 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11303 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11304 gimple_call_set_lhs (g, cplx);
11305 gimple_seq_add_stmt (body_p, g);
11306 tree mul = create_tmp_var (unsigned_type_node);
11307 g = gimple_build_assign (mul, REALPART_EXPR,
11308 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11309 gimple_seq_add_stmt (body_p, g);
11310 tree ovf = create_tmp_var (unsigned_type_node);
11311 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11312 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11313 gimple_seq_add_stmt (body_p, g);
11315 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11316 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11317 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11318 lab7, lab8);
11319 gimple_seq_add_stmt (body_p, g);
11320 g = gimple_build_label (lab7);
11321 gimple_seq_add_stmt (body_p, g);
11323 tree andv = create_tmp_var (unsigned_type_node);
11324 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11325 gimple_seq_add_stmt (body_p, g);
11326 tree andvm1 = create_tmp_var (unsigned_type_node);
11327 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11328 build_minus_one_cst (unsigned_type_node));
11329 gimple_seq_add_stmt (body_p, g);
11331 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11332 gimple_seq_add_stmt (body_p, g);
11334 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11335 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11336 gimple_seq_add_stmt (body_p, g);
11337 g = gimple_build_label (lab9);
11338 gimple_seq_add_stmt (body_p, g);
11339 gimple_seq_add_seq (body_p, reduc_list);
11340 g = gimple_build_label (lab8);
11341 gimple_seq_add_stmt (body_p, g);
11343 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11344 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11345 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11346 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11347 lab10, lab11);
11348 gimple_seq_add_stmt (body_p, g);
11349 g = gimple_build_label (lab10);
11350 gimple_seq_add_stmt (body_p, g);
11351 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11352 gimple_seq_add_stmt (body_p, g);
11353 g = gimple_build_goto (lab12);
11354 gimple_seq_add_stmt (body_p, g);
11355 g = gimple_build_label (lab11);
11356 gimple_seq_add_stmt (body_p, g);
11357 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11358 gimple_seq_add_stmt (body_p, g);
11359 g = gimple_build_label (lab12);
11360 gimple_seq_add_stmt (body_p, g);
11362 g = omp_build_barrier (NULL);
11363 gimple_seq_add_stmt (body_p, g);
11365 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11366 lab3, lab2);
11367 gimple_seq_add_stmt (body_p, g);
11369 g = gimple_build_label (lab2);
11370 gimple_seq_add_stmt (body_p, g);
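/* Second per-thread initialization: thread 0 restarts from the
   initializer or identity value (thr02_list) while every other thread
   seeds its running value with the combined result of all preceding
   threads, rpriva[thread_num - 1] (thrn2_list).  */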
11372 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11373 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11374 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11375 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11376 gimple_seq_add_stmt (body_p, g);
11377 g = gimple_build_label (lab1);
11378 gimple_seq_add_stmt (body_p, g);
11379 gimple_seq_add_seq (body_p, thr02_list);
11380 g = gimple_build_goto (lab3);
11381 gimple_seq_add_stmt (body_p, g);
11382 g = gimple_build_label (lab2);
11383 gimple_seq_add_stmt (body_p, g);
11384 gimple_seq_add_seq (body_p, thrn2_list);
11385 g = gimple_build_label (lab3);
11386 gimple_seq_add_stmt (body_p, g);
11388 g = gimple_build_assign (ivar, size_zero_node);
11389 gimple_seq_add_stmt (body_p, g);
11390 gimple_seq_add_stmt (body_p, new_stmt);
11391 gimple_seq_add_seq (body_p, new_body);
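/* Only the last thread copies the overall result back into the original
   list item: new_dlist runs last_list under
   thread_num == num_threads - 1 and then appends the normal destructor
   list.  */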
11393 gimple_seq new_dlist = NULL;
11394 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11395 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11396 tree num_threadsm1 = create_tmp_var (integer_type_node);
11397 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11398 integer_minus_one_node);
11399 gimple_seq_add_stmt (&new_dlist, g);
11400 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11401 gimple_seq_add_stmt (&new_dlist, g);
11402 g = gimple_build_label (lab1);
11403 gimple_seq_add_stmt (&new_dlist, g);
11404 gimple_seq_add_seq (&new_dlist, last_list);
11405 g = gimple_build_label (lab2);
11406 gimple_seq_add_stmt (&new_dlist, g);
11407 gimple_seq_add_seq (&new_dlist, *dlist);
11408 *dlist = new_dlist;
11411 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11412 the addresses of variables to be made private at the surrounding
11413 parallelism level. Such functions appear in the gimple code stream in two
11414 forms, e.g. for a partitioned loop:
11416 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11417 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11418 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11419 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11421 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11422 not as part of a HEAD_MARK sequence:
11424 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11426 For such stand-alone appearances, the 3rd argument is always 0, denoting
11427 gang partitioning. */
11429 static gcall *
11430 lower_oacc_private_marker (omp_context *ctx)
11432 if (ctx->oacc_privatization_candidates.length () == 0)
11433 return NULL;
11435 auto_vec<tree, 5> args;
11437 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11438 args.quick_push (integer_zero_node);
11439 args.quick_push (integer_minus_one_node);
11441 int i;
11442 tree decl;
11443 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11445 for (omp_context *thisctx = ctx; thisctx; thisctx = thisctx->outer)
11447 tree inner_decl = maybe_lookup_decl (decl, thisctx);
11448 if (inner_decl)
11450 decl = inner_decl;
11451 break;
11454 gcc_checking_assert (decl);
11456 tree addr = build_fold_addr_expr (decl);
11457 args.safe_push (addr);
11460 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
11463 /* Lower code for an OMP loop directive. */
11465 static void
11466 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11468 tree *rhs_p, block;
11469 struct omp_for_data fd, *fdp = NULL;
11470 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
11471 gbind *new_stmt;
11472 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
11473 gimple_seq cnt_list = NULL, clist = NULL;
11474 gimple_seq oacc_head = NULL, oacc_tail = NULL;
11475 size_t i;
11477 push_gimplify_context ();
11479 if (is_gimple_omp_oacc (ctx->stmt))
11480 oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));
11482 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
11484 block = make_node (BLOCK);
11485 new_stmt = gimple_build_bind (NULL, NULL, block);
11486 /* Replace at gsi right away, so that 'stmt' is no longer a member
11487 of a sequence, as we're going to add it to a different one
11488 below. */
11489 gsi_replace (gsi_p, new_stmt, true);
11491 /* Move declaration of temporaries in the loop body before we make
11492 it go away. */
11493 omp_for_body = gimple_omp_body (stmt);
11494 if (!gimple_seq_empty_p (omp_for_body)
11495 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
11497 gbind *inner_bind
11498 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
11499 tree vars = gimple_bind_vars (inner_bind);
11500 if (is_gimple_omp_oacc (ctx->stmt))
11501 oacc_privatization_scan_decl_chain (ctx, vars);
11502 gimple_bind_append_vars (new_stmt, vars);
11503 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11504 keep them on the inner_bind and its block. */
11505 gimple_bind_set_vars (inner_bind, NULL_TREE);
11506 if (gimple_bind_block (inner_bind))
11507 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
11510 if (gimple_omp_for_combined_into_p (stmt))
11512 omp_extract_for_data (stmt, &fd, NULL);
11513 fdp = &fd;
11515 /* We need two temporaries with fd.loop.v type (istart/iend)
11516 and then (fd.collapse - 1) temporaries with the same
11517 type for count2 ... countN-1 vars if not constant. */
11518 size_t count = 2;
11519 tree type = fd.iter_type;
11520 if (fd.collapse > 1
11521 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11522 count += fd.collapse - 1;
11523 size_t count2 = 0;
11524 tree type2 = NULL_TREE;
11525 bool taskreg_for
11526 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
11527 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
11528 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
11529 tree simtc = NULL;
11530 tree clauses = *pc;
11531 if (fd.collapse > 1
11532 && fd.non_rect
11533 && fd.last_nonrect == fd.first_nonrect + 1
11534 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11535 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
11536 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
11538 v = gimple_omp_for_index (stmt, fd.first_nonrect);
11539 type2 = TREE_TYPE (v);
11540 count++;
11541 count2 = 3;
11543 if (taskreg_for)
11544 outerc
11545 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
11546 OMP_CLAUSE__LOOPTEMP_);
11547 if (ctx->simt_stmt)
11548 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
11549 OMP_CLAUSE__LOOPTEMP_);
11550 for (i = 0; i < count + count2; i++)
11552 tree temp;
11553 if (taskreg_for)
11555 gcc_assert (outerc);
11556 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
11557 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
11558 OMP_CLAUSE__LOOPTEMP_);
11560 else
11562 /* If there are 2 adjacent SIMD stmts, one with a _simt_
11563 clause, the other without, make sure they have the same
11564 decls in _looptemp_ clauses, because the outer stmt
11565 they are combined into will look up just one inner_stmt. */
11566 if (ctx->simt_stmt)
11567 temp = OMP_CLAUSE_DECL (simtc);
11568 else
11569 temp = create_tmp_var (i >= count ? type2 : type);
11570 insert_decl_map (&ctx->outer->cb, temp, temp);
11572 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
11573 OMP_CLAUSE_DECL (*pc) = temp;
11574 pc = &OMP_CLAUSE_CHAIN (*pc);
11575 if (ctx->simt_stmt)
11576 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
11577 OMP_CLAUSE__LOOPTEMP_);
11579 *pc = clauses;
11582 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11583 dlist = NULL;
11584 body = NULL;
11585 tree rclauses
11586 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
11587 OMP_CLAUSE_REDUCTION);
11588 tree rtmp = NULL_TREE;
11589 if (rclauses)
11591 tree type = build_pointer_type (pointer_sized_int_node);
11592 tree temp = create_tmp_var (type);
11593 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
11594 OMP_CLAUSE_DECL (c) = temp;
11595 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
11596 gimple_omp_for_set_clauses (stmt, c);
11597 lower_omp_task_reductions (ctx, OMP_FOR,
11598 gimple_omp_for_clauses (stmt),
11599 &tred_ilist, &tred_dlist);
11600 rclauses = c;
11601 rtmp = make_ssa_name (type);
11602 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
11605 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
11606 ctx);
11608 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
11609 fdp);
11610 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
11611 gimple_omp_for_pre_body (stmt));
11613 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11615 gcall *private_marker = NULL;
11616 if (is_gimple_omp_oacc (ctx->stmt)
11617 && !gimple_seq_empty_p (omp_for_body))
11618 private_marker = lower_oacc_private_marker (ctx);
11620 /* Lower the header expressions. At this point, we can assume that
11621 the header is of the form:
11623 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11625 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11626 using the .omp_data_s mapping, if needed. */
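/* For non-rectangular loops, an initial or final value can instead be a
   3-element TREE_VEC referencing an outer iteration variable; only
   elements 1 and 2 (the multiplier and the addend) might need to be
   lowered here.  */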
11627 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
11629 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
11630 if (TREE_CODE (*rhs_p) == TREE_VEC)
11632 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11633 TREE_VEC_ELT (*rhs_p, 1)
11634 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11635 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11636 TREE_VEC_ELT (*rhs_p, 2)
11637 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11639 else if (!is_gimple_min_invariant (*rhs_p))
11640 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11641 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11642 recompute_tree_invariant_for_addr_expr (*rhs_p);
11644 rhs_p = gimple_omp_for_final_ptr (stmt, i);
11645 if (TREE_CODE (*rhs_p) == TREE_VEC)
11647 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11648 TREE_VEC_ELT (*rhs_p, 1)
11649 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11650 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11651 TREE_VEC_ELT (*rhs_p, 2)
11652 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11654 else if (!is_gimple_min_invariant (*rhs_p))
11655 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11656 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11657 recompute_tree_invariant_for_addr_expr (*rhs_p);
11659 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
11660 if (!is_gimple_min_invariant (*rhs_p))
11661 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11663 if (rclauses)
11664 gimple_seq_add_seq (&tred_ilist, cnt_list);
11665 else
11666 gimple_seq_add_seq (&body, cnt_list);
11668 /* Once lowered, extract the bounds and clauses. */
11669 omp_extract_for_data (stmt, &fd, NULL);
11671 if (is_gimple_omp_oacc (ctx->stmt)
11672 && !ctx_in_oacc_kernels_region (ctx))
11673 lower_oacc_head_tail (gimple_location (stmt),
11674 gimple_omp_for_clauses (stmt), private_marker,
11675 &oacc_head, &oacc_tail, ctx);
11677 /* Add OpenACC partitioning and reduction markers just before the loop. */
11678 if (oacc_head)
11679 gimple_seq_add_seq (&body, oacc_head);
11681 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
11683 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11684 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11685 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11686 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11688 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
11689 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
11690 OMP_CLAUSE_LINEAR_STEP (c)
11691 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
11692 ctx);
11695 if ((ctx->scan_inclusive || ctx->scan_exclusive)
11696 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11697 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
11698 else
11700 gimple_seq_add_stmt (&body, stmt);
11701 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
11704 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
11705 fd.loop.v));
11707 /* After the loop, add exit clauses. */
11708 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
11710 if (clist)
11712 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
11713 gcall *g = gimple_build_call (fndecl, 0);
11714 gimple_seq_add_stmt (&body, g);
11715 gimple_seq_add_seq (&body, clist);
11716 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
11717 g = gimple_build_call (fndecl, 0);
11718 gimple_seq_add_stmt (&body, g);
11721 if (ctx->cancellable)
11722 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
11724 gimple_seq_add_seq (&body, dlist);
11726 if (rclauses)
11728 gimple_seq_add_seq (&tred_ilist, body);
11729 body = tred_ilist;
11732 body = maybe_catch_exception (body);
11734 /* Region exit marker goes at the end of the loop body. */
11735 gimple *g = gimple_build_omp_return (fd.have_nowait);
11736 gimple_seq_add_stmt (&body, g);
11738 gimple_seq_add_seq (&body, tred_dlist);
11740 maybe_add_implicit_barrier_cancel (ctx, g, &body);
11742 if (rclauses)
11743 OMP_CLAUSE_DECL (rclauses) = rtmp;
11745 /* Add OpenACC joining and reduction markers just after the loop. */
11746 if (oacc_tail)
11747 gimple_seq_add_seq (&body, oacc_tail);
11749 pop_gimplify_context (new_stmt);
11751 gimple_bind_append_vars (new_stmt, ctx->block_vars);
11752 maybe_remove_omp_member_access_dummy_vars (new_stmt);
11753 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
11754 if (BLOCK_VARS (block))
11755 TREE_USED (block) = 1;
11757 gimple_bind_set_body (new_stmt, body);
11758 gimple_omp_set_body (stmt, NULL);
11759 gimple_omp_for_set_pre_body (stmt, NULL);
11762 /* Callback for walk_stmts. Check if the current statement only contains
11763 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11765 static tree
11766 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11767 bool *handled_ops_p,
11768 struct walk_stmt_info *wi)
11770 int *info = (int *) wi->info;
11771 gimple *stmt = gsi_stmt (*gsi_p);
11773 *handled_ops_p = true;
11774 switch (gimple_code (stmt))
11776 WALK_SUBSTMTS;
11778 case GIMPLE_DEBUG:
11779 break;
11780 case GIMPLE_OMP_FOR:
11781 case GIMPLE_OMP_SECTIONS:
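/* The first workshare statement seen takes *info from 0 to 1; a
   second one, or any other statement (below), forces -1.  A final
   value of 1 thus means the body contains exactly one workshare.  */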
11782 *info = *info == 0 ? 1 : -1;
11783 break;
11784 default:
11785 *info = -1;
11786 break;
11788 return NULL;
11791 struct omp_taskcopy_context
11793 /* This field must be at the beginning, as we do "inheritance": Some
11794 callback functions for tree-inline.c (e.g., omp_copy_decl)
11795 receive a copy_body_data pointer that is up-casted to an
11796 omp_context pointer. */
11797 copy_body_data cb;
11798 omp_context *ctx;
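/* copy_body_data callback: variables that have a field in the task
   context's sender record get a fresh temporary in the copyfn body;
   everything else is used unchanged.  */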
11801 static tree
11802 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11804 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11806 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11807 return create_tmp_var (TREE_TYPE (var));
11809 return var;
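/* Rebuild ORIG_TYPE as a new RECORD_TYPE with every field's type, size
   and offset remapped through TCCTX, for records whose layout depends
   on variably modified types.  */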
11812 static tree
11813 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11815 tree name, new_fields = NULL, type, f;
11817 type = lang_hooks.types.make_type (RECORD_TYPE);
11818 name = DECL_NAME (TYPE_NAME (orig_type));
11819 name = build_decl (gimple_location (tcctx->ctx->stmt),
11820 TYPE_DECL, name, type);
11821 TYPE_NAME (type) = name;
11823 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11825 tree new_f = copy_node (f);
11826 DECL_CONTEXT (new_f) = type;
11827 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11828 TREE_CHAIN (new_f) = new_fields;
11829 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11830 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11831 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11832 &tcctx->cb, NULL);
11833 new_fields = new_f;
11834 tcctx->cb.decl_map->put (f, new_f);
11836 TYPE_FIELDS (type) = nreverse (new_fields);
11837 layout_type (type);
11838 return type;
11841 /* Create task copyfn. */
11843 static void
11844 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
11846 struct function *child_cfun;
11847 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
11848 tree record_type, srecord_type, bind, list;
11849 bool record_needs_remap = false, srecord_needs_remap = false;
11850 splay_tree_node n;
11851 struct omp_taskcopy_context tcctx;
11852 location_t loc = gimple_location (task_stmt);
11853 size_t looptempno = 0;
11855 child_fn = gimple_omp_task_copy_fn (task_stmt);
11856 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
11857 gcc_assert (child_cfun->cfg == NULL);
11858 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
11860 /* Reset DECL_CONTEXT on function arguments. */
11861 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
11862 DECL_CONTEXT (t) = child_fn;
11864 /* Populate the function. */
11865 push_gimplify_context ();
11866 push_cfun (child_cfun);
11868 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
11869 TREE_SIDE_EFFECTS (bind) = 1;
11870 list = NULL;
11871 DECL_SAVED_TREE (child_fn) = bind;
11872 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
11874 /* Remap src and dst argument types if needed. */
11875 record_type = ctx->record_type;
11876 srecord_type = ctx->srecord_type;
11877 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
11878 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11880 record_needs_remap = true;
11881 break;
11883 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
11884 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11886 srecord_needs_remap = true;
11887 break;
11890 if (record_needs_remap || srecord_needs_remap)
11892 memset (&tcctx, '\0', sizeof (tcctx));
11893 tcctx.cb.src_fn = ctx->cb.src_fn;
11894 tcctx.cb.dst_fn = child_fn;
11895 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
11896 gcc_checking_assert (tcctx.cb.src_node);
11897 tcctx.cb.dst_node = tcctx.cb.src_node;
11898 tcctx.cb.src_cfun = ctx->cb.src_cfun;
11899 tcctx.cb.copy_decl = task_copyfn_copy_decl;
11900 tcctx.cb.eh_lp_nr = 0;
11901 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
11902 tcctx.cb.decl_map = new hash_map<tree, tree>;
11903 tcctx.ctx = ctx;
11905 if (record_needs_remap)
11906 record_type = task_copyfn_remap_type (&tcctx, record_type);
11907 if (srecord_needs_remap)
11908 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
11910 else
11911 tcctx.cb.decl_map = NULL;
11913 arg = DECL_ARGUMENTS (child_fn);
11914 TREE_TYPE (arg) = build_pointer_type (record_type);
11915 sarg = DECL_CHAIN (arg);
11916 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
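/* ARG, the first parameter, points at the task's own data block
   (record_type); SARG, the second, at the block that was filled in by
   the encountering thread (srecord_type).  Below, destinations are
   built from ARG and sources from SARG.  */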
11918 /* First pass: initialize the temporaries used in the sizes and field
11919 offsets of record_type and srecord_type. */
11920 if (tcctx.cb.decl_map)
11921 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11922 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11924 tree *p;
11926 decl = OMP_CLAUSE_DECL (c);
11927 p = tcctx.cb.decl_map->get (decl);
11928 if (p == NULL)
11929 continue;
11930 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11931 sf = (tree) n->value;
11932 sf = *tcctx.cb.decl_map->get (sf);
11933 src = build_simple_mem_ref_loc (loc, sarg);
11934 src = omp_build_component_ref (src, sf);
11935 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
11936 append_to_statement_list (t, &list);
11939 /* Second pass: copy shared var pointers and copy-construct non-VLA
11940 firstprivate vars. */
11941 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11942 switch (OMP_CLAUSE_CODE (c))
11944 splay_tree_key key;
11945 case OMP_CLAUSE_SHARED:
11946 decl = OMP_CLAUSE_DECL (c);
11947 key = (splay_tree_key) decl;
11948 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
11949 key = (splay_tree_key) &DECL_UID (decl);
11950 n = splay_tree_lookup (ctx->field_map, key);
11951 if (n == NULL)
11952 break;
11953 f = (tree) n->value;
11954 if (tcctx.cb.decl_map)
11955 f = *tcctx.cb.decl_map->get (f);
11956 n = splay_tree_lookup (ctx->sfield_map, key);
11957 sf = (tree) n->value;
11958 if (tcctx.cb.decl_map)
11959 sf = *tcctx.cb.decl_map->get (sf);
11960 src = build_simple_mem_ref_loc (loc, sarg);
11961 src = omp_build_component_ref (src, sf);
11962 dst = build_simple_mem_ref_loc (loc, arg);
11963 dst = omp_build_component_ref (dst, f);
11964 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11965 append_to_statement_list (t, &list);
11966 break;
11967 case OMP_CLAUSE_REDUCTION:
11968 case OMP_CLAUSE_IN_REDUCTION:
11969 decl = OMP_CLAUSE_DECL (c);
11970 if (TREE_CODE (decl) == MEM_REF)
11972 decl = TREE_OPERAND (decl, 0);
11973 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
11974 decl = TREE_OPERAND (decl, 0);
11975 if (TREE_CODE (decl) == INDIRECT_REF
11976 || TREE_CODE (decl) == ADDR_EXPR)
11977 decl = TREE_OPERAND (decl, 0);
11979 key = (splay_tree_key) decl;
11980 n = splay_tree_lookup (ctx->field_map, key);
11981 if (n == NULL)
11982 break;
11983 f = (tree) n->value;
11984 if (tcctx.cb.decl_map)
11985 f = *tcctx.cb.decl_map->get (f);
11986 n = splay_tree_lookup (ctx->sfield_map, key);
11987 sf = (tree) n->value;
11988 if (tcctx.cb.decl_map)
11989 sf = *tcctx.cb.decl_map->get (sf);
11990 src = build_simple_mem_ref_loc (loc, sarg);
11991 src = omp_build_component_ref (src, sf);
11992 if (decl != OMP_CLAUSE_DECL (c)
11993 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
11994 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
11995 src = build_simple_mem_ref_loc (loc, src);
11996 dst = build_simple_mem_ref_loc (loc, arg);
11997 dst = omp_build_component_ref (dst, f);
11998 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11999 append_to_statement_list (t, &list);
12000 break;
12001 case OMP_CLAUSE__LOOPTEMP_:
12002 /* Fields for first two _looptemp_ clauses are initialized by
12003 GOMP_taskloop*, the rest are handled like firstprivate. */
12004 if (looptempno < 2)
12006 looptempno++;
12007 break;
12009 /* FALLTHRU */
12010 case OMP_CLAUSE__REDUCTEMP_:
12011 case OMP_CLAUSE_FIRSTPRIVATE:
12012 decl = OMP_CLAUSE_DECL (c);
12013 if (is_variable_sized (decl))
12014 break;
12015 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12016 if (n == NULL)
12017 break;
12018 f = (tree) n->value;
12019 if (tcctx.cb.decl_map)
12020 f = *tcctx.cb.decl_map->get (f);
12021 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12022 if (n != NULL)
12024 sf = (tree) n->value;
12025 if (tcctx.cb.decl_map)
12026 sf = *tcctx.cb.decl_map->get (sf);
12027 src = build_simple_mem_ref_loc (loc, sarg);
12028 src = omp_build_component_ref (src, sf);
12029 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
12030 src = build_simple_mem_ref_loc (loc, src);
12032 else
12033 src = decl;
12034 dst = build_simple_mem_ref_loc (loc, arg);
12035 dst = omp_build_component_ref (dst, f);
12036 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
12037 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12038 else
12040 if (ctx->allocate_map)
12041 if (tree *allocatorp = ctx->allocate_map->get (decl))
12043 tree allocator = *allocatorp;
12044 if (TREE_CODE (allocator) != INTEGER_CST)
12046 n = splay_tree_lookup (ctx->sfield_map,
12047 (splay_tree_key) allocator);
12048 allocator = (tree) n->value;
12049 if (tcctx.cb.decl_map)
12050 allocator = *tcctx.cb.decl_map->get (allocator);
12051 tree a = build_simple_mem_ref_loc (loc, sarg);
12052 allocator = omp_build_component_ref (a, allocator);
12054 allocator = fold_convert (pointer_sized_int_node, allocator);
12055 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
12056 tree align = build_int_cst (size_type_node,
12057 DECL_ALIGN_UNIT (decl));
12058 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
12059 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
12060 allocator);
12061 ptr = fold_convert (TREE_TYPE (dst), ptr);
12062 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
12063 append_to_statement_list (t, &list);
12064 dst = build_simple_mem_ref_loc (loc, dst);
12066 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12068 append_to_statement_list (t, &list);
12069 break;
12070 case OMP_CLAUSE_PRIVATE:
12071 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
12072 break;
12073 decl = OMP_CLAUSE_DECL (c);
12074 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12075 f = (tree) n->value;
12076 if (tcctx.cb.decl_map)
12077 f = *tcctx.cb.decl_map->get (f);
12078 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12079 if (n != NULL)
12081 sf = (tree) n->value;
12082 if (tcctx.cb.decl_map)
12083 sf = *tcctx.cb.decl_map->get (sf);
12084 src = build_simple_mem_ref_loc (loc, sarg);
12085 src = omp_build_component_ref (src, sf);
12086 if (use_pointer_for_field (decl, NULL))
12087 src = build_simple_mem_ref_loc (loc, src);
12089 else
12090 src = decl;
12091 dst = build_simple_mem_ref_loc (loc, arg);
12092 dst = omp_build_component_ref (dst, f);
12093 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12094 append_to_statement_list (t, &list);
12095 break;
12096 default:
12097 break;
12100 /* Last pass: handle VLA firstprivates. */
12101 if (tcctx.cb.decl_map)
12102 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12103 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12105 tree ind, ptr, df;
12107 decl = OMP_CLAUSE_DECL (c);
12108 if (!is_variable_sized (decl))
12109 continue;
12110 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12111 if (n == NULL)
12112 continue;
12113 f = (tree) n->value;
12114 f = *tcctx.cb.decl_map->get (f);
12115 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
12116 ind = DECL_VALUE_EXPR (decl);
12117 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
12118 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
12119 n = splay_tree_lookup (ctx->sfield_map,
12120 (splay_tree_key) TREE_OPERAND (ind, 0));
12121 sf = (tree) n->value;
12122 sf = *tcctx.cb.decl_map->get (sf);
12123 src = build_simple_mem_ref_loc (loc, sarg);
12124 src = omp_build_component_ref (src, sf);
12125 src = build_simple_mem_ref_loc (loc, src);
12126 dst = build_simple_mem_ref_loc (loc, arg);
12127 dst = omp_build_component_ref (dst, f);
12128 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12129 append_to_statement_list (t, &list);
12130 n = splay_tree_lookup (ctx->field_map,
12131 (splay_tree_key) TREE_OPERAND (ind, 0));
12132 df = (tree) n->value;
12133 df = *tcctx.cb.decl_map->get (df);
12134 ptr = build_simple_mem_ref_loc (loc, arg);
12135 ptr = omp_build_component_ref (ptr, df);
12136 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
12137 build_fold_addr_expr_loc (loc, dst));
12138 append_to_statement_list (t, &list);
12141 t = build1 (RETURN_EXPR, void_type_node, NULL);
12142 append_to_statement_list (t, &list);
12144 if (tcctx.cb.decl_map)
12145 delete tcctx.cb.decl_map;
12146 pop_gimplify_context (NULL);
12147 BIND_EXPR_BODY (bind) = list;
12148 pop_cfun ();
12151 static void
12152 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
12154 tree c, clauses;
12155 gimple *g;
12156 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
12158 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
12159 gcc_assert (clauses);
12160 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12161 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
12162 switch (OMP_CLAUSE_DEPEND_KIND (c))
12164 case OMP_CLAUSE_DEPEND_LAST:
12165 /* Lowering already done at gimplification. */
12166 return;
12167 case OMP_CLAUSE_DEPEND_IN:
12168 cnt[2]++;
12169 break;
12170 case OMP_CLAUSE_DEPEND_OUT:
12171 case OMP_CLAUSE_DEPEND_INOUT:
12172 cnt[0]++;
12173 break;
12174 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12175 cnt[1]++;
12176 break;
12177 case OMP_CLAUSE_DEPEND_DEPOBJ:
12178 cnt[3]++;
12179 break;
12180 case OMP_CLAUSE_DEPEND_SOURCE:
12181 case OMP_CLAUSE_DEPEND_SINK:
12182 /* FALLTHRU */
12183 default:
12184 gcc_unreachable ();
12186 if (cnt[1] || cnt[3])
12187 idx = 5;
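/* The depend array laid out below uses one of two runtime formats.
   Without mutexinoutset or depobj items (idx == 2): element 0 is the
   total count, element 1 the number of out/inout items, followed by
   the addresses, out/inout entries first.  With them (idx == 5):
   element 0 is zero, element 1 the total count, elements 2/3/4 the
   counts of out/inout, mutexinoutset and in items, followed by the
   addresses grouped in that order, depobj entries last.  */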
12188 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
12189 tree type = build_array_type_nelts (ptr_type_node, total + idx);
12190 tree array = create_tmp_var (type);
12191 TREE_ADDRESSABLE (array) = 1;
12192 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12193 NULL_TREE);
12194 if (idx == 5)
12196 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12197 gimple_seq_add_stmt (iseq, g);
12198 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12199 NULL_TREE);
12201 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12202 gimple_seq_add_stmt (iseq, g);
12203 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12205 r = build4 (ARRAY_REF, ptr_type_node, array,
12206 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12207 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12208 gimple_seq_add_stmt (iseq, g);
12210 for (i = 0; i < 4; i++)
12212 if (cnt[i] == 0)
12213 continue;
12214 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12215 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12216 continue;
12217 else
12219 switch (OMP_CLAUSE_DEPEND_KIND (c))
12221 case OMP_CLAUSE_DEPEND_IN:
12222 if (i != 2)
12223 continue;
12224 break;
12225 case OMP_CLAUSE_DEPEND_OUT:
12226 case OMP_CLAUSE_DEPEND_INOUT:
12227 if (i != 0)
12228 continue;
12229 break;
12230 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12231 if (i != 1)
12232 continue;
12233 break;
12234 case OMP_CLAUSE_DEPEND_DEPOBJ:
12235 if (i != 3)
12236 continue;
12237 break;
12238 default:
12239 gcc_unreachable ();
12241 tree t = OMP_CLAUSE_DECL (c);
12242 t = fold_convert (ptr_type_node, t);
12243 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12244 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12245 NULL_TREE, NULL_TREE);
12246 g = gimple_build_assign (r, t);
12247 gimple_seq_add_stmt (iseq, g);
12250 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12251 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12252 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12253 OMP_CLAUSE_CHAIN (c) = *pclauses;
12254 *pclauses = c;
12255 tree clobber = build_clobber (type);
12256 g = gimple_build_assign (array, clobber);
12257 gimple_seq_add_stmt (oseq, g);
12260 /* Lower the OpenMP parallel or task directive in the current statement
12261 in GSI_P. CTX holds context information for the directive. */
12263 static void
12264 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12266 tree clauses;
12267 tree child_fn, t;
12268 gimple *stmt = gsi_stmt (*gsi_p);
12269 gbind *par_bind, *bind, *dep_bind = NULL;
12270 gimple_seq par_body;
12271 location_t loc = gimple_location (stmt);
12273 clauses = gimple_omp_taskreg_clauses (stmt);
12274 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12275 && gimple_omp_task_taskwait_p (stmt))
12277 par_bind = NULL;
12278 par_body = NULL;
12280 else
12282 par_bind
12283 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
12284 par_body = gimple_bind_body (par_bind);
12286 child_fn = ctx->cb.dst_fn;
12287 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
12288 && !gimple_omp_parallel_combined_p (stmt))
12290 struct walk_stmt_info wi;
12291 int ws_num = 0;
12293 memset (&wi, 0, sizeof (wi));
12294 wi.info = &ws_num;
12295 wi.val_only = true;
12296 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12297 if (ws_num == 1)
12298 gimple_omp_parallel_set_combined_p (stmt, true);
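/* A parallel body consisting of exactly one workshare region is marked
   combined, so that expansion can use the combined runtime entry
   points (e.g. GOMP_parallel_loop_* for a single loop).  */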
12300 gimple_seq dep_ilist = NULL;
12301 gimple_seq dep_olist = NULL;
12302 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12303 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12305 push_gimplify_context ();
12306 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12307 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
12308 &dep_ilist, &dep_olist);
12311 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12312 && gimple_omp_task_taskwait_p (stmt))
12314 if (dep_bind)
12316 gsi_replace (gsi_p, dep_bind, true);
12317 gimple_bind_add_seq (dep_bind, dep_ilist);
12318 gimple_bind_add_stmt (dep_bind, stmt);
12319 gimple_bind_add_seq (dep_bind, dep_olist);
12320 pop_gimplify_context (dep_bind);
12322 return;
12325 if (ctx->srecord_type)
12326 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
12328 gimple_seq tskred_ilist = NULL;
12329 gimple_seq tskred_olist = NULL;
12330 if ((is_task_ctx (ctx)
12331 && gimple_omp_task_taskloop_p (ctx->stmt)
12332 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
12333 OMP_CLAUSE_REDUCTION))
12334 || (is_parallel_ctx (ctx)
12335 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
12336 OMP_CLAUSE__REDUCTEMP_)))
12338 if (dep_bind == NULL)
12340 push_gimplify_context ();
12341 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12343 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
12344 : OMP_PARALLEL,
12345 gimple_omp_taskreg_clauses (ctx->stmt),
12346 &tskred_ilist, &tskred_olist);
12349 push_gimplify_context ();
12351 gimple_seq par_olist = NULL;
12352 gimple_seq par_ilist = NULL;
12353 gimple_seq par_rlist = NULL;
12354 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
12355 lower_omp (&par_body, ctx);
12356 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
12357 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
12359 /* Declare all the variables created by mapping and the variables
12360 declared in the scope of the parallel body. */
12361 record_vars_into (ctx->block_vars, child_fn);
12362 maybe_remove_omp_member_access_dummy_vars (par_bind);
12363 record_vars_into (gimple_bind_vars (par_bind), child_fn);
12365 if (ctx->record_type)
12367 ctx->sender_decl
12368 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12369 : ctx->record_type, ".omp_data_o");
12370 DECL_NAMELESS (ctx->sender_decl) = 1;
12371 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12372 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
12375 gimple_seq olist = NULL;
12376 gimple_seq ilist = NULL;
12377 lower_send_clauses (clauses, &ilist, &olist, ctx);
12378 lower_send_shared_vars (&ilist, &olist, ctx);
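/* The sender struct is dead once the region has ended; emit a clobber
   for it after the region so later passes can reuse its stack slot.  */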
12380 if (ctx->record_type)
12382 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12383 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12384 clobber));
12387 /* Once all the expansions are done, sequence all the different
12388 fragments inside gimple_omp_body. */
12390 gimple_seq new_body = NULL;
12392 if (ctx->record_type)
12394 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12395 /* fixup_child_record_type might have changed receiver_decl's type. */
12396 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12397 gimple_seq_add_stmt (&new_body,
12398 gimple_build_assign (ctx->receiver_decl, t));
12401 gimple_seq_add_seq (&new_body, par_ilist);
12402 gimple_seq_add_seq (&new_body, par_body);
12403 gimple_seq_add_seq (&new_body, par_rlist);
12404 if (ctx->cancellable)
12405 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
12406 gimple_seq_add_seq (&new_body, par_olist);
12407 new_body = maybe_catch_exception (new_body);
12408 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
12409 gimple_seq_add_stmt (&new_body,
12410 gimple_build_omp_continue (integer_zero_node,
12411 integer_zero_node));
12412 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12413 gimple_omp_set_body (stmt, new_body);
12415 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
12416 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12417 else
12418 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
12419 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12420 gimple_bind_add_seq (bind, ilist);
12421 gimple_bind_add_stmt (bind, stmt);
12422 gimple_bind_add_seq (bind, olist);
12424 pop_gimplify_context (NULL);
12426 if (dep_bind)
12428 gimple_bind_add_seq (dep_bind, dep_ilist);
12429 gimple_bind_add_seq (dep_bind, tskred_ilist);
12430 gimple_bind_add_stmt (dep_bind, bind);
12431 gimple_bind_add_seq (dep_bind, tskred_olist);
12432 gimple_bind_add_seq (dep_bind, dep_olist);
12433 pop_gimplify_context (dep_bind);
12437 /* Lower the GIMPLE_OMP_TARGET in the current statement
12438 in GSI_P. CTX holds context information for the directive. */
12440 static void
12441 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12443 tree clauses;
12444 tree child_fn, t, c;
12445 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12446 gbind *tgt_bind, *bind, *dep_bind = NULL;
12447 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12448 location_t loc = gimple_location (stmt);
12449 bool offloaded, data_region;
12450 unsigned int map_cnt = 0;
12451 tree in_reduction_clauses = NULL_TREE;
12453 offloaded = is_gimple_omp_offloaded (stmt);
12454 switch (gimple_omp_target_kind (stmt))
12456 case GF_OMP_TARGET_KIND_REGION:
12457 tree *p, *q;
12458 q = &in_reduction_clauses;
12459 for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
12460 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12462 *q = *p;
12463 q = &OMP_CLAUSE_CHAIN (*q);
12464 *p = OMP_CLAUSE_CHAIN (*p);
12466 else
12467 p = &OMP_CLAUSE_CHAIN (*p);
12468 *q = NULL_TREE;
12469 *p = in_reduction_clauses;
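/* The loop above moved all OMP_CLAUSE_IN_REDUCTION clauses to the tail
   of the clause chain, preserving their order; the in_reduction_clauses
   sub-chain is additionally lowered into dep_ilist/dep_olist below.  */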
12470 /* FALLTHRU */
12471 case GF_OMP_TARGET_KIND_UPDATE:
12472 case GF_OMP_TARGET_KIND_ENTER_DATA:
12473 case GF_OMP_TARGET_KIND_EXIT_DATA:
12474 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12475 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12476 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12477 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12478 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12479 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12480 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12481 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12482 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12483 data_region = false;
12484 break;
12485 case GF_OMP_TARGET_KIND_DATA:
12486 case GF_OMP_TARGET_KIND_OACC_DATA:
12487 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12488 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12489 data_region = true;
12490 break;
12491 default:
12492 gcc_unreachable ();
12495 clauses = gimple_omp_target_clauses (stmt);
12497 gimple_seq dep_ilist = NULL;
12498 gimple_seq dep_olist = NULL;
12499 bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
12500 if (has_depend || in_reduction_clauses)
12502 push_gimplify_context ();
12503 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12504 if (has_depend)
12505 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12506 &dep_ilist, &dep_olist);
12507 if (in_reduction_clauses)
12508 lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
12509 ctx, NULL);
12512 tgt_bind = NULL;
12513 tgt_body = NULL;
12514 if (offloaded)
12516 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12517 tgt_body = gimple_bind_body (tgt_bind);
12519 else if (data_region)
12520 tgt_body = gimple_omp_body (stmt);
12521 child_fn = ctx->cb.dst_fn;
12523 push_gimplify_context ();
12524 fplist = NULL;
12526 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12527 switch (OMP_CLAUSE_CODE (c))
12529 tree var, x;
12531 default:
12532 break;
12533 case OMP_CLAUSE_MAP:
12534 #if CHECKING_P
12535 /* First check what we're prepared to handle in the following. */
12536 switch (OMP_CLAUSE_MAP_KIND (c))
12538 case GOMP_MAP_ALLOC:
12539 case GOMP_MAP_TO:
12540 case GOMP_MAP_FROM:
12541 case GOMP_MAP_TOFROM:
12542 case GOMP_MAP_POINTER:
12543 case GOMP_MAP_TO_PSET:
12544 case GOMP_MAP_DELETE:
12545 case GOMP_MAP_RELEASE:
12546 case GOMP_MAP_ALWAYS_TO:
12547 case GOMP_MAP_ALWAYS_FROM:
12548 case GOMP_MAP_ALWAYS_TOFROM:
12549 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12550 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12551 case GOMP_MAP_STRUCT:
12552 case GOMP_MAP_ALWAYS_POINTER:
12553 case GOMP_MAP_ATTACH:
12554 case GOMP_MAP_DETACH:
12555 break;
12556 case GOMP_MAP_IF_PRESENT:
12557 case GOMP_MAP_FORCE_ALLOC:
12558 case GOMP_MAP_FORCE_TO:
12559 case GOMP_MAP_FORCE_FROM:
12560 case GOMP_MAP_FORCE_TOFROM:
12561 case GOMP_MAP_FORCE_PRESENT:
12562 case GOMP_MAP_FORCE_DEVICEPTR:
12563 case GOMP_MAP_DEVICE_RESIDENT:
12564 case GOMP_MAP_LINK:
12565 case GOMP_MAP_FORCE_DETACH:
12566 gcc_assert (is_gimple_omp_oacc (stmt));
12567 break;
12568 default:
12569 gcc_unreachable ();
12571 #endif
12572 /* FALLTHRU */
12573 case OMP_CLAUSE_TO:
12574 case OMP_CLAUSE_FROM:
12575 oacc_firstprivate:
12576 var = OMP_CLAUSE_DECL (c);
12577 if (!DECL_P (var))
12579 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12580 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12581 && (OMP_CLAUSE_MAP_KIND (c)
12582 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12583 map_cnt++;
12584 continue;
12587 if (DECL_SIZE (var)
12588 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12590 tree var2 = DECL_VALUE_EXPR (var);
12591 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12592 var2 = TREE_OPERAND (var2, 0);
12593 gcc_assert (DECL_P (var2));
12594 var = var2;
12597 if (offloaded
12598 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12599 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12600 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12602 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12604 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12605 && varpool_node::get_create (var)->offloadable)
12606 continue;
12608 tree type = build_pointer_type (TREE_TYPE (var));
12609 tree new_var = lookup_decl (var, ctx);
12610 x = create_tmp_var_raw (type, get_name (new_var));
12611 gimple_add_tmp_var (x);
12612 x = build_simple_mem_ref (x);
12613 SET_DECL_VALUE_EXPR (new_var, x);
12614 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12616 continue;
12619 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12620 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12621 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12622 && is_omp_target (stmt))
12624 gcc_assert (maybe_lookup_field (c, ctx));
12625 map_cnt++;
12626 continue;
12629 if (!maybe_lookup_field (var, ctx))
12630 continue;
12632 /* Don't remap compute constructs' reduction variables, because the
12633 intermediate result must be local to each gang. */
12634 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12635 && is_gimple_omp_oacc (ctx->stmt)
12636 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12638 x = build_receiver_ref (var, true, ctx);
12639 tree new_var = lookup_decl (var, ctx);
12641 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12642 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12643 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12644 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12645 x = build_simple_mem_ref (x);
12646 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12648 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12649 if (omp_is_reference (new_var)
12650 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12651 || DECL_BY_REFERENCE (var)))
12653 /* Create a local object to hold the instance
12654 value. */
12655 tree type = TREE_TYPE (TREE_TYPE (new_var));
12656 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12657 tree inst = create_tmp_var (type, id);
12658 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12659 x = build_fold_addr_expr (inst);
12661 gimplify_assign (new_var, x, &fplist);
12663 else if (DECL_P (new_var))
12665 SET_DECL_VALUE_EXPR (new_var, x);
12666 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12668 else
12669 gcc_unreachable ();
12671 map_cnt++;
12672 break;
12674 case OMP_CLAUSE_FIRSTPRIVATE:
12675 gcc_checking_assert (offloaded);
12676 if (is_gimple_omp_oacc (ctx->stmt))
12678 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12679 gcc_checking_assert (!is_oacc_kernels (ctx));
12680 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12681 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12683 goto oacc_firstprivate;
12685 map_cnt++;
12686 var = OMP_CLAUSE_DECL (c);
12687 if (!omp_is_reference (var)
12688 && !is_gimple_reg_type (TREE_TYPE (var)))
12690 tree new_var = lookup_decl (var, ctx);
12691 if (is_variable_sized (var))
12693 tree pvar = DECL_VALUE_EXPR (var);
12694 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12695 pvar = TREE_OPERAND (pvar, 0);
12696 gcc_assert (DECL_P (pvar));
12697 tree new_pvar = lookup_decl (pvar, ctx);
12698 x = build_fold_indirect_ref (new_pvar);
12699 TREE_THIS_NOTRAP (x) = 1;
12701 else
12702 x = build_receiver_ref (var, true, ctx);
12703 SET_DECL_VALUE_EXPR (new_var, x);
12704 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12706 break;
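/* Sketch: a firstprivate aggregate is not copied here; the privatized
   decl is redirected (via its VALUE_EXPR) at the received copy, either
   through the receiver record or, for VLAs, through the remapped
   pointer.  */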
12708 case OMP_CLAUSE_PRIVATE:
12709 gcc_checking_assert (offloaded);
12710 if (is_gimple_omp_oacc (ctx->stmt))
12712 /* No 'private' clauses on OpenACC 'kernels'. */
12713 gcc_checking_assert (!is_oacc_kernels (ctx));
12714 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12715 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12717 break;
12719 var = OMP_CLAUSE_DECL (c);
12720 if (is_variable_sized (var))
12722 tree new_var = lookup_decl (var, ctx);
12723 tree pvar = DECL_VALUE_EXPR (var);
12724 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12725 pvar = TREE_OPERAND (pvar, 0);
12726 gcc_assert (DECL_P (pvar));
12727 tree new_pvar = lookup_decl (pvar, ctx);
12728 x = build_fold_indirect_ref (new_pvar);
12729 TREE_THIS_NOTRAP (x) = 1;
12730 SET_DECL_VALUE_EXPR (new_var, x);
12731 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12733 break;
12735 case OMP_CLAUSE_USE_DEVICE_PTR:
12736 case OMP_CLAUSE_USE_DEVICE_ADDR:
12737 case OMP_CLAUSE_IS_DEVICE_PTR:
12738 var = OMP_CLAUSE_DECL (c);
12739 map_cnt++;
12740 if (is_variable_sized (var))
12742 tree new_var = lookup_decl (var, ctx);
12743 tree pvar = DECL_VALUE_EXPR (var);
12744 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12745 pvar = TREE_OPERAND (pvar, 0);
12746 gcc_assert (DECL_P (pvar));
12747 tree new_pvar = lookup_decl (pvar, ctx);
12748 x = build_fold_indirect_ref (new_pvar);
12749 TREE_THIS_NOTRAP (x) = 1;
12750 SET_DECL_VALUE_EXPR (new_var, x);
12751 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12753 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12754 && !omp_is_reference (var)
12755 && !omp_is_allocatable_or_ptr (var)
12756 && !lang_hooks.decls.omp_array_data (var, true))
12757 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12759 tree new_var = lookup_decl (var, ctx);
12760 tree type = build_pointer_type (TREE_TYPE (var));
12761 x = create_tmp_var_raw (type, get_name (new_var));
12762 gimple_add_tmp_var (x);
12763 x = build_simple_mem_ref (x);
12764 SET_DECL_VALUE_EXPR (new_var, x);
12765 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12767 else
12769 tree new_var = lookup_decl (var, ctx);
12770 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12771 gimple_add_tmp_var (x);
12772 SET_DECL_VALUE_EXPR (new_var, x);
12773 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12775 break;
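/* Sketch: for use_device_ptr/use_device_addr/is_device_ptr the
   privatized decl is pointed (via its VALUE_EXPR) at a temporary that
   is filled in with the looked-up device address when the clause is
   revisited below.  */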
12778 if (offloaded)
12780 target_nesting_level++;
12781 lower_omp (&tgt_body, ctx);
12782 target_nesting_level--;
12784 else if (data_region)
12785 lower_omp (&tgt_body, ctx);
12787 if (offloaded)
12789 /* Declare all the variables created by mapping and the variables
12790 declared in the scope of the target body. */
12791 record_vars_into (ctx->block_vars, child_fn);
12792 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
12793 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
12796 olist = NULL;
12797 ilist = NULL;
12798 if (ctx->record_type)
12800 ctx->sender_decl
12801 = create_tmp_var (ctx->record_type, ".omp_data_arr");
12802 DECL_NAMELESS (ctx->sender_decl) = 1;
12803 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12804 t = make_tree_vec (3);
12805 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
12806 TREE_VEC_ELT (t, 1)
12807 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
12808 ".omp_data_sizes");
12809 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
12810 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
12811 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
12812 tree tkind_type = short_unsigned_type_node;
12813 int talign_shift = 8;
12814 TREE_VEC_ELT (t, 2)
12815 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
12816 ".omp_data_kinds");
12817 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
12818 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
12819 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
12820 gimple_omp_target_set_data_arg (stmt, t);
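/* A sketch of the marshalling triple built above, for a hypothetical
   'map(tofrom: x) map(to: y)':

     .omp_data_arr   = { &x, &y }
     .omp_data_sizes = { sizeof (x), sizeof (y) }
     .omp_data_kinds = { GOMP_MAP_TOFROM | align-bits, GOMP_MAP_TO | align-bits }

   The sizes/kinds arrays stay TREE_STATIC only as long as every
   element is a compile-time constant; the loop below clears the flag
   otherwise.  */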
12822 vec<constructor_elt, va_gc> *vsize;
12823 vec<constructor_elt, va_gc> *vkind;
12824 vec_alloc (vsize, map_cnt);
12825 vec_alloc (vkind, map_cnt);
12826 unsigned int map_idx = 0;
12828 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12829 switch (OMP_CLAUSE_CODE (c))
12831 tree ovar, nc, s, purpose, var, x, type;
12832 unsigned int talign;
12834 default:
12835 break;
12837 case OMP_CLAUSE_MAP:
12838 case OMP_CLAUSE_TO:
12839 case OMP_CLAUSE_FROM:
12840 oacc_firstprivate_map:
12841 nc = c;
12842 ovar = OMP_CLAUSE_DECL (c);
12843 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12844 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12845 || (OMP_CLAUSE_MAP_KIND (c)
12846 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12847 break;
12848 if (!DECL_P (ovar))
12850 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12851 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
12853 nc = OMP_CLAUSE_CHAIN (c);
12854 gcc_checking_assert (OMP_CLAUSE_DECL (nc)
12855 == get_base_address (ovar));
12856 ovar = OMP_CLAUSE_DECL (nc);
12858 else
12860 tree x = build_sender_ref (ovar, ctx);
12861 tree v = ovar;
12862 if (in_reduction_clauses
12863 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12864 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
12866 v = unshare_expr (v);
12867 tree *p = &v;
12868 while (handled_component_p (*p)
12869 || TREE_CODE (*p) == INDIRECT_REF
12870 || TREE_CODE (*p) == ADDR_EXPR
12871 || TREE_CODE (*p) == MEM_REF
12872 || TREE_CODE (*p) == NON_LVALUE_EXPR)
12873 p = &TREE_OPERAND (*p, 0);
12874 tree d = *p;
12875 if (is_variable_sized (d))
12877 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
12878 d = DECL_VALUE_EXPR (d);
12879 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
12880 d = TREE_OPERAND (d, 0);
12881 gcc_assert (DECL_P (d));
12883 splay_tree_key key
12884 = (splay_tree_key) &DECL_CONTEXT (d);
12885 tree nd = (tree) splay_tree_lookup (ctx->field_map,
12886 key)->value;
12887 if (d == *p)
12888 *p = nd;
12889 else
12890 *p = build_fold_indirect_ref (nd);
12892 v = build_fold_addr_expr_with_type (v, ptr_type_node);
12893 gimplify_assign (x, v, &ilist);
12894 nc = NULL_TREE;
12897 else
12899 if (DECL_SIZE (ovar)
12900 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
12902 tree ovar2 = DECL_VALUE_EXPR (ovar);
12903 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
12904 ovar2 = TREE_OPERAND (ovar2, 0);
12905 gcc_assert (DECL_P (ovar2));
12906 ovar = ovar2;
12908 if (!maybe_lookup_field (ovar, ctx)
12909 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12910 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12911 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
12912 continue;
12915 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
12916 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
12917 talign = DECL_ALIGN_UNIT (ovar);
12919 var = NULL_TREE;
12920 if (nc)
12922 if (in_reduction_clauses
12923 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12924 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
12926 tree d = ovar;
12927 if (is_variable_sized (d))
12929 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
12930 d = DECL_VALUE_EXPR (d);
12931 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
12932 d = TREE_OPERAND (d, 0);
12933 gcc_assert (DECL_P (d));
12935 splay_tree_key key
12936 = (splay_tree_key) &DECL_CONTEXT (d);
12937 tree nd = (tree) splay_tree_lookup (ctx->field_map,
12938 key)->value;
12939 if (d == ovar)
12940 var = nd;
12941 else
12942 var = build_fold_indirect_ref (nd);
12944 else
12945 var = lookup_decl_in_outer_ctx (ovar, ctx);
12947 if (nc
12948 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12949 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12950 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12951 && is_omp_target (stmt))
12953 x = build_sender_ref (c, ctx);
12954 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
12956 else if (nc)
12958 x = build_sender_ref (ovar, ctx);
12960 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12961 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12962 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12963 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
12965 gcc_assert (offloaded);
12966 tree avar
12967 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
12968 mark_addressable (avar);
12969 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
12970 talign = DECL_ALIGN_UNIT (avar);
12971 avar = build_fold_addr_expr (avar);
12972 gimplify_assign (x, avar, &ilist);
12974 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12976 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12977 if (!omp_is_reference (var))
12979 if (is_gimple_reg (var)
12980 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12981 suppress_warning (var);
12982 var = build_fold_addr_expr (var);
12984 else
12985 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12986 gimplify_assign (x, var, &ilist);
12988 else if (is_gimple_reg (var))
12990 gcc_assert (offloaded);
12991 tree avar = create_tmp_var (TREE_TYPE (var));
12992 mark_addressable (avar);
12993 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
12994 if (GOMP_MAP_COPY_TO_P (map_kind)
12995 || map_kind == GOMP_MAP_POINTER
12996 || map_kind == GOMP_MAP_TO_PSET
12997 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
12999 /* If we need to initialize a temporary
13000 with VAR because it is not addressable, and
13001 the variable hasn't been initialized yet, then
13002 we'll get a warning for the store to avar.
13003 Don't warn in that case; the mapping might
13004 be implicit. */
13005 suppress_warning (var, OPT_Wuninitialized);
13006 gimplify_assign (avar, var, &ilist);
13008 avar = build_fold_addr_expr (avar);
13009 gimplify_assign (x, avar, &ilist);
13010 if ((GOMP_MAP_COPY_FROM_P (map_kind)
13011 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13012 && !TYPE_READONLY (TREE_TYPE (var)))
13014 x = unshare_expr (x);
13015 x = build_simple_mem_ref (x);
13016 gimplify_assign (var, x, &olist);
13019 else
13021 /* While MAP is handled explicitly by the FE,
13022 for 'target update', only the identifier is passed. */
13023 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
13024 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
13025 && (omp_is_allocatable_or_ptr (var)
13026 && omp_check_optional_argument (var, false)))
13027 var = build_fold_indirect_ref (var);
13028 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
13029 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
13030 || (!omp_is_allocatable_or_ptr (var)
13031 && !omp_check_optional_argument (var, false)))
13032 var = build_fold_addr_expr (var);
13033 gimplify_assign (x, var, &ilist);
13036 s = NULL_TREE;
13037 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13039 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13040 s = TREE_TYPE (ovar);
13041 if (TREE_CODE (s) == REFERENCE_TYPE
13042 || omp_check_optional_argument (ovar, false))
13043 s = TREE_TYPE (s);
13044 s = TYPE_SIZE_UNIT (s);
13046 else
13047 s = OMP_CLAUSE_SIZE (c);
13048 if (s == NULL_TREE)
13049 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13050 s = fold_convert (size_type_node, s);
13051 purpose = size_int (map_idx++);
13052 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13053 if (TREE_CODE (s) != INTEGER_CST)
13054 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13056 unsigned HOST_WIDE_INT tkind, tkind_zero;
13057 switch (OMP_CLAUSE_CODE (c))
13059 case OMP_CLAUSE_MAP:
13060 tkind = OMP_CLAUSE_MAP_KIND (c);
13061 tkind_zero = tkind;
13062 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
13063 switch (tkind)
13065 case GOMP_MAP_ALLOC:
13066 case GOMP_MAP_IF_PRESENT:
13067 case GOMP_MAP_TO:
13068 case GOMP_MAP_FROM:
13069 case GOMP_MAP_TOFROM:
13070 case GOMP_MAP_ALWAYS_TO:
13071 case GOMP_MAP_ALWAYS_FROM:
13072 case GOMP_MAP_ALWAYS_TOFROM:
13073 case GOMP_MAP_RELEASE:
13074 case GOMP_MAP_FORCE_TO:
13075 case GOMP_MAP_FORCE_FROM:
13076 case GOMP_MAP_FORCE_TOFROM:
13077 case GOMP_MAP_FORCE_PRESENT:
13078 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
13079 break;
13080 case GOMP_MAP_DELETE:
13081 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
13082 default:
13083 break;
13085 if (tkind_zero != tkind)
13087 if (integer_zerop (s))
13088 tkind = tkind_zero;
13089 else if (integer_nonzerop (s))
13090 tkind_zero = tkind;
13092 break;
13093 case OMP_CLAUSE_FIRSTPRIVATE:
13094 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13095 tkind = GOMP_MAP_TO;
13096 tkind_zero = tkind;
13097 break;
13098 case OMP_CLAUSE_TO:
13099 tkind = GOMP_MAP_TO;
13100 tkind_zero = tkind;
13101 break;
13102 case OMP_CLAUSE_FROM:
13103 tkind = GOMP_MAP_FROM;
13104 tkind_zero = tkind;
13105 break;
13106 default:
13107 gcc_unreachable ();
13109 gcc_checking_assert (tkind
13110 < (HOST_WIDE_INT_C (1U) << talign_shift));
13111 gcc_checking_assert (tkind_zero
13112 < (HOST_WIDE_INT_C (1U) << talign_shift));
13113 talign = ceil_log2 (talign);
13114 tkind |= talign << talign_shift;
13115 tkind_zero |= talign << talign_shift;
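/* Worked example (a sketch): for GOMP_MAP_TO (0x1) on an 8-byte
   aligned object, ceil_log2 (8) == 3, so the encoded kind is
   (3 << talign_shift) | 0x1 == 0x301.  */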
13116 gcc_checking_assert (tkind
13117 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13118 gcc_checking_assert (tkind_zero
13119 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13120 if (tkind == tkind_zero)
13121 x = build_int_cstu (tkind_type, tkind);
13122 else
13124 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
13125 x = build3 (COND_EXPR, tkind_type,
13126 fold_build2 (EQ_EXPR, boolean_type_node,
13127 unshare_expr (s), size_zero_node),
13128 build_int_cstu (tkind_type, tkind_zero),
13129 build_int_cstu (tkind_type, tkind));
13131 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
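/* Sketch: when a maybe-zero-length array section's length cannot be
   resolved at compile time, the emitted kind is chosen at run time by
   the COND_EXPR above: s == 0 ? tkind_zero : tkind.  */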
13132 if (nc && nc != c)
13133 c = nc;
13134 break;
13136 case OMP_CLAUSE_FIRSTPRIVATE:
13137 if (is_gimple_omp_oacc (ctx->stmt))
13138 goto oacc_firstprivate_map;
13139 ovar = OMP_CLAUSE_DECL (c);
13140 if (omp_is_reference (ovar))
13141 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13142 else
13143 talign = DECL_ALIGN_UNIT (ovar);
13144 var = lookup_decl_in_outer_ctx (ovar, ctx);
13145 x = build_sender_ref (ovar, ctx);
13146 tkind = GOMP_MAP_FIRSTPRIVATE;
13147 type = TREE_TYPE (ovar);
13148 if (omp_is_reference (ovar))
13149 type = TREE_TYPE (type);
13150 if ((INTEGRAL_TYPE_P (type)
13151 && TYPE_PRECISION (type) <= POINTER_SIZE)
13152 || TREE_CODE (type) == POINTER_TYPE)
13154 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13155 tree t = var;
13156 if (omp_is_reference (var))
13157 t = build_simple_mem_ref (var);
13158 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13159 suppress_warning (var);
13160 if (TREE_CODE (type) != POINTER_TYPE)
13161 t = fold_convert (pointer_sized_int_node, t);
13162 t = fold_convert (TREE_TYPE (x), t);
13163 gimplify_assign (x, t, &ilist);
13165 else if (omp_is_reference (var))
13166 gimplify_assign (x, var, &ilist);
13167 else if (is_gimple_reg (var))
13169 tree avar = create_tmp_var (TREE_TYPE (var));
13170 mark_addressable (avar);
13171 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13172 suppress_warning (var);
13173 gimplify_assign (avar, var, &ilist);
13174 avar = build_fold_addr_expr (avar);
13175 gimplify_assign (x, avar, &ilist);
13177 else
13179 var = build_fold_addr_expr (var);
13180 gimplify_assign (x, var, &ilist);
13182 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13183 s = size_int (0);
13184 else if (omp_is_reference (ovar))
13185 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13186 else
13187 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13188 s = fold_convert (size_type_node, s);
13189 purpose = size_int (map_idx++);
13190 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13191 if (TREE_CODE (s) != INTEGER_CST)
13192 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13194 gcc_checking_assert (tkind
13195 < (HOST_WIDE_INT_C (1U) << talign_shift));
13196 talign = ceil_log2 (talign);
13197 tkind |= talign << talign_shift;
13198 gcc_checking_assert (tkind
13199 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13200 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13201 build_int_cstu (tkind_type, tkind));
13202 break;
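/* Sketch of the non-OpenACC firstprivate path above: an integral no
   wider than a pointer (or a pointer) is widened and passed by value
   in its .omp_data_arr slot as GOMP_MAP_FIRSTPRIVATE_INT with size 0;
   anything else is passed by address as GOMP_MAP_FIRSTPRIVATE.  */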
13204 case OMP_CLAUSE_USE_DEVICE_PTR:
13205 case OMP_CLAUSE_USE_DEVICE_ADDR:
13206 case OMP_CLAUSE_IS_DEVICE_PTR:
13207 ovar = OMP_CLAUSE_DECL (c);
13208 var = lookup_decl_in_outer_ctx (ovar, ctx);
13210 if (lang_hooks.decls.omp_array_data (ovar, true))
13212 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13213 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13214 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13216 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13218 tkind = GOMP_MAP_USE_DEVICE_PTR;
13219 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13221 else
13223 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13224 x = build_sender_ref (ovar, ctx);
13227 if (is_gimple_omp_oacc (ctx->stmt))
13229 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13231 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13232 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13235 type = TREE_TYPE (ovar);
13236 if (lang_hooks.decls.omp_array_data (ovar, true))
13237 var = lang_hooks.decls.omp_array_data (ovar, false);
13238 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13239 && !omp_is_reference (ovar)
13240 && !omp_is_allocatable_or_ptr (ovar))
13241 || TREE_CODE (type) == ARRAY_TYPE)
13242 var = build_fold_addr_expr (var);
13243 else
13245 if (omp_is_reference (ovar)
13246 || omp_check_optional_argument (ovar, false)
13247 || omp_is_allocatable_or_ptr (ovar))
13249 type = TREE_TYPE (type);
13250 if (POINTER_TYPE_P (type)
13251 && TREE_CODE (type) != ARRAY_TYPE
13252 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13253 && !omp_is_allocatable_or_ptr (ovar))
13254 || (omp_is_reference (ovar)
13255 && omp_is_allocatable_or_ptr (ovar))))
13256 var = build_simple_mem_ref (var);
13257 var = fold_convert (TREE_TYPE (x), var);
13260 tree present;
13261 present = omp_check_optional_argument (ovar, true);
13262 if (present)
13264 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13265 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13266 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13267 tree new_x = unshare_expr (x);
13268 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13269 fb_rvalue);
13270 gcond *cond = gimple_build_cond_from_tree (present,
13271 notnull_label,
13272 null_label);
13273 gimple_seq_add_stmt (&ilist, cond);
13274 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
13275 gimplify_assign (new_x, null_pointer_node, &ilist);
13276 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
13277 gimple_seq_add_stmt (&ilist,
13278 gimple_build_label (notnull_label));
13279 gimplify_assign (x, var, &ilist);
13280 gimple_seq_add_stmt (&ilist,
13281 gimple_build_label (opt_arg_label));
13283 else
13284 gimplify_assign (x, var, &ilist);
13285 s = size_int (0);
13286 purpose = size_int (map_idx++);
13287 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13288 gcc_checking_assert (tkind
13289 < (HOST_WIDE_INT_C (1U) << talign_shift));
13290 gcc_checking_assert (tkind
13291 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13292 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13293 build_int_cstu (tkind_type, tkind));
13294 break;
13297 gcc_assert (map_idx == map_cnt);
13299 DECL_INITIAL (TREE_VEC_ELT (t, 1))
13300 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13301 DECL_INITIAL (TREE_VEC_ELT (t, 2))
13302 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
13303 for (int i = 1; i <= 2; i++)
13304 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
13306 gimple_seq initlist = NULL;
13307 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
13308 TREE_VEC_ELT (t, i)),
13309 &initlist, true, NULL_TREE);
13310 gimple_seq_add_seq (&ilist, initlist);
13312 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
13313 gimple_seq_add_stmt (&olist,
13314 gimple_build_assign (TREE_VEC_ELT (t, i),
13315 clobber));
13317 else if (omp_maybe_offloaded_ctx (ctx->outer))
13319 tree id = get_identifier ("omp declare target");
13320 tree decl = TREE_VEC_ELT (t, i);
13321 DECL_ATTRIBUTES (decl)
13322 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13323 varpool_node *node = varpool_node::get (decl);
13324 if (node)
13326 node->offloadable = 1;
13327 if (ENABLE_OFFLOADING)
13329 g->have_offload = true;
13330 vec_safe_push (offload_vars, t);
13335 tree clobber = build_clobber (ctx->record_type);
13336 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13337 clobber));
13340 /* Once all the expansions are done, sequence all the different
13341 fragments inside gimple_omp_body. */
13343 new_body = NULL;
13345 if (offloaded
13346 && ctx->record_type)
13348 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13349 /* fixup_child_record_type might have changed receiver_decl's type. */
13350 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13351 gimple_seq_add_stmt (&new_body,
13352 gimple_build_assign (ctx->receiver_decl, t));
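/* Sketch: the receiver is handed the address of the marshalling
   record, roughly '.omp_data_i = &.omp_data_arr' (modulo the
   conversion above); expansion later rewires this to the child
   function's incoming argument.  */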
13354 gimple_seq_add_seq (&new_body, fplist);
13356 if (offloaded || data_region)
13358 tree prev = NULL_TREE;
13359 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13360 switch (OMP_CLAUSE_CODE (c))
13362 tree var, x;
13363 default:
13364 break;
13365 case OMP_CLAUSE_FIRSTPRIVATE:
13366 if (is_gimple_omp_oacc (ctx->stmt))
13367 break;
13368 var = OMP_CLAUSE_DECL (c);
13369 if (omp_is_reference (var)
13370 || is_gimple_reg_type (TREE_TYPE (var)))
13372 tree new_var = lookup_decl (var, ctx);
13373 tree type;
13374 type = TREE_TYPE (var);
13375 if (omp_is_reference (var))
13376 type = TREE_TYPE (type);
13377 if ((INTEGRAL_TYPE_P (type)
13378 && TYPE_PRECISION (type) <= POINTER_SIZE)
13379 || TREE_CODE (type) == POINTER_TYPE)
13381 x = build_receiver_ref (var, false, ctx);
13382 if (TREE_CODE (type) != POINTER_TYPE)
13383 x = fold_convert (pointer_sized_int_node, x);
13384 x = fold_convert (type, x);
13385 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13386 fb_rvalue);
13387 if (omp_is_reference (var))
13389 tree v = create_tmp_var_raw (type, get_name (var));
13390 gimple_add_tmp_var (v);
13391 TREE_ADDRESSABLE (v) = 1;
13392 gimple_seq_add_stmt (&new_body,
13393 gimple_build_assign (v, x));
13394 x = build_fold_addr_expr (v);
13396 gimple_seq_add_stmt (&new_body,
13397 gimple_build_assign (new_var, x));
13399 else
13401 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
13402 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13403 fb_rvalue);
13404 gimple_seq_add_stmt (&new_body,
13405 gimple_build_assign (new_var, x));
13408 else if (is_variable_sized (var))
13410 tree pvar = DECL_VALUE_EXPR (var);
13411 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13412 pvar = TREE_OPERAND (pvar, 0);
13413 gcc_assert (DECL_P (pvar));
13414 tree new_var = lookup_decl (pvar, ctx);
13415 x = build_receiver_ref (var, false, ctx);
13416 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13417 gimple_seq_add_stmt (&new_body,
13418 gimple_build_assign (new_var, x));
13420 break;
13421 case OMP_CLAUSE_PRIVATE:
13422 if (is_gimple_omp_oacc (ctx->stmt))
13423 break;
13424 var = OMP_CLAUSE_DECL (c);
13425 if (omp_is_reference (var))
13427 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13428 tree new_var = lookup_decl (var, ctx);
13429 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13430 if (TREE_CONSTANT (x))
13432 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13433 get_name (var));
13434 gimple_add_tmp_var (x);
13435 TREE_ADDRESSABLE (x) = 1;
13436 x = build_fold_addr_expr_loc (clause_loc, x);
13438 else
13439 break;
13441 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13442 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13443 gimple_seq_add_stmt (&new_body,
13444 gimple_build_assign (new_var, x));
13446 break;
13447 case OMP_CLAUSE_USE_DEVICE_PTR:
13448 case OMP_CLAUSE_USE_DEVICE_ADDR:
13449 case OMP_CLAUSE_IS_DEVICE_PTR:
13450 tree new_var;
13451 gimple_seq assign_body;
13452 bool is_array_data;
13453 bool do_optional_check;
13454 assign_body = NULL;
13455 do_optional_check = false;
13456 var = OMP_CLAUSE_DECL (c);
13457 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13459 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13460 x = build_sender_ref (is_array_data
13461 ? (splay_tree_key) &DECL_NAME (var)
13462 : (splay_tree_key) &DECL_UID (var), ctx);
13463 else
13464 x = build_receiver_ref (var, false, ctx);
13466 if (is_array_data)
13468 bool is_ref = omp_is_reference (var);
13469 do_optional_check = true;
13470 /* First, we copy the descriptor data from the host; then
13471 we update its data to point to the target address. */
13472 new_var = lookup_decl (var, ctx);
13473 new_var = DECL_VALUE_EXPR (new_var);
13474 tree v = new_var;
13476 if (is_ref)
13478 var = build_fold_indirect_ref (var);
13479 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
13480 fb_rvalue);
13481 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
13482 gimple_add_tmp_var (v);
13483 TREE_ADDRESSABLE (v) = 1;
13484 gimple_seq_add_stmt (&assign_body,
13485 gimple_build_assign (v, var));
13486 tree rhs = build_fold_addr_expr (v);
13487 gimple_seq_add_stmt (&assign_body,
13488 gimple_build_assign (new_var, rhs));
13490 else
13491 gimple_seq_add_stmt (&assign_body,
13492 gimple_build_assign (new_var, var));
13494 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13495 gcc_assert (v2);
13496 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13497 gimple_seq_add_stmt (&assign_body,
13498 gimple_build_assign (v2, x));
13500 else if (is_variable_sized (var))
13502 tree pvar = DECL_VALUE_EXPR (var);
13503 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13504 pvar = TREE_OPERAND (pvar, 0);
13505 gcc_assert (DECL_P (pvar));
13506 new_var = lookup_decl (pvar, ctx);
13507 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13508 gimple_seq_add_stmt (&assign_body,
13509 gimple_build_assign (new_var, x));
13511 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13512 && !omp_is_reference (var)
13513 && !omp_is_allocatable_or_ptr (var))
13514 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13516 new_var = lookup_decl (var, ctx);
13517 new_var = DECL_VALUE_EXPR (new_var);
13518 gcc_assert (TREE_CODE (new_var) == MEM_REF);
13519 new_var = TREE_OPERAND (new_var, 0);
13520 gcc_assert (DECL_P (new_var));
13521 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13522 gimple_seq_add_stmt (&assign_body,
13523 gimple_build_assign (new_var, x));
13525 else
13527 tree type = TREE_TYPE (var);
13528 new_var = lookup_decl (var, ctx);
13529 if (omp_is_reference (var))
13531 type = TREE_TYPE (type);
13532 if (POINTER_TYPE_P (type)
13533 && TREE_CODE (type) != ARRAY_TYPE
13534 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13535 || (omp_is_reference (var)
13536 && omp_is_allocatable_or_ptr (var))))
13538 tree v = create_tmp_var_raw (type, get_name (var));
13539 gimple_add_tmp_var (v);
13540 TREE_ADDRESSABLE (v) = 1;
13541 x = fold_convert (type, x);
13542 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13543 fb_rvalue);
13544 gimple_seq_add_stmt (&assign_body,
13545 gimple_build_assign (v, x));
13546 x = build_fold_addr_expr (v);
13547 do_optional_check = true;
13550 new_var = DECL_VALUE_EXPR (new_var);
13551 x = fold_convert (TREE_TYPE (new_var), x);
13552 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13553 gimple_seq_add_stmt (&assign_body,
13554 gimple_build_assign (new_var, x));
13556 tree present;
13557 present = (do_optional_check
13558 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
13559 : NULL_TREE);
13560 if (present)
13562 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13563 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13564 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13565 glabel *null_glabel = gimple_build_label (null_label);
13566 glabel *notnull_glabel = gimple_build_label (notnull_label);
13567 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
13568 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13569 fb_rvalue);
13570 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13571 fb_rvalue);
13572 gcond *cond = gimple_build_cond_from_tree (present,
13573 notnull_label,
13574 null_label);
13575 gimple_seq_add_stmt (&new_body, cond);
13576 gimple_seq_add_stmt (&new_body, null_glabel);
13577 gimplify_assign (new_var, null_pointer_node, &new_body);
13578 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13579 gimple_seq_add_stmt (&new_body, notnull_glabel);
13580 gimple_seq_add_seq (&new_body, assign_body);
13581 gimple_seq_add_stmt (&new_body,
13582 gimple_build_label (opt_arg_label));
13584 else
13585 gimple_seq_add_seq (&new_body, assign_body);
13586 break;
13588 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
13589 so that the firstprivate vars holding OMP_CLAUSE_SIZE, if needed,
13590 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13591 or references to VLAs. */
13592 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13593 switch (OMP_CLAUSE_CODE (c))
13595 tree var;
13596 default:
13597 break;
13598 case OMP_CLAUSE_MAP:
13599 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13600 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13602 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13603 poly_int64 offset = 0;
13604 gcc_assert (prev);
13605 var = OMP_CLAUSE_DECL (c);
13606 if (DECL_P (var)
13607 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
13608 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
13609 ctx))
13610 && varpool_node::get_create (var)->offloadable)
13611 break;
13612 if (TREE_CODE (var) == INDIRECT_REF
13613 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
13614 var = TREE_OPERAND (var, 0);
13615 if (TREE_CODE (var) == COMPONENT_REF)
13617 var = get_addr_base_and_unit_offset (var, &offset);
13618 gcc_assert (var != NULL_TREE && DECL_P (var));
13620 else if (DECL_SIZE (var)
13621 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
13623 tree var2 = DECL_VALUE_EXPR (var);
13624 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
13625 var2 = TREE_OPERAND (var2, 0);
13626 gcc_assert (DECL_P (var2));
13627 var = var2;
13629 tree new_var = lookup_decl (var, ctx), x;
13630 tree type = TREE_TYPE (new_var);
13631 bool is_ref;
13632 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
13633 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13634 == COMPONENT_REF))
13636 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
13637 is_ref = true;
13638 new_var = build2 (MEM_REF, type,
13639 build_fold_addr_expr (new_var),
13640 build_int_cst (build_pointer_type (type),
13641 offset));
13643 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
13645 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
13646 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
13647 new_var = build2 (MEM_REF, type,
13648 build_fold_addr_expr (new_var),
13649 build_int_cst (build_pointer_type (type),
13650 offset));
13652 else
13653 is_ref = omp_is_reference (var);
13654 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13655 is_ref = false;
13656 bool ref_to_array = false;
13657 if (is_ref)
13659 type = TREE_TYPE (type);
13660 if (TREE_CODE (type) == ARRAY_TYPE)
13662 type = build_pointer_type (type);
13663 ref_to_array = true;
13666 else if (TREE_CODE (type) == ARRAY_TYPE)
13668 tree decl2 = DECL_VALUE_EXPR (new_var);
13669 gcc_assert (TREE_CODE (decl2) == MEM_REF);
13670 decl2 = TREE_OPERAND (decl2, 0);
13671 gcc_assert (DECL_P (decl2));
13672 new_var = decl2;
13673 type = TREE_TYPE (new_var);
13675 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
13676 x = fold_convert_loc (clause_loc, type, x);
13677 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
13679 tree bias = OMP_CLAUSE_SIZE (c);
13680 if (DECL_P (bias))
13681 bias = lookup_decl (bias, ctx);
13682 bias = fold_convert_loc (clause_loc, sizetype, bias);
13683 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
13684 bias);
13685 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
13686 TREE_TYPE (x), x, bias);
13688 if (ref_to_array)
13689 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13690 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13691 if (is_ref && !ref_to_array)
13693 tree t = create_tmp_var_raw (type, get_name (var));
13694 gimple_add_tmp_var (t);
13695 TREE_ADDRESSABLE (t) = 1;
13696 gimple_seq_add_stmt (&new_body,
13697 gimple_build_assign (t, x));
13698 x = build_fold_addr_expr_loc (clause_loc, t);
13700 gimple_seq_add_stmt (&new_body,
13701 gimple_build_assign (new_var, x));
13702 prev = NULL_TREE;
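/* Sketch: for a zero-based array-section pointer, OMP_CLAUSE_SIZE
   carries the bias recorded by the FE, so the received pointer is
   rewound by that bias above before being stored into the privatized
   pointer.  */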
13704 else if (OMP_CLAUSE_CHAIN (c)
13705 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
13706 == OMP_CLAUSE_MAP
13707 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13708 == GOMP_MAP_FIRSTPRIVATE_POINTER
13709 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13710 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13711 prev = c;
13712 break;
13713 case OMP_CLAUSE_PRIVATE:
13714 var = OMP_CLAUSE_DECL (c);
13715 if (is_variable_sized (var))
13717 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13718 tree new_var = lookup_decl (var, ctx);
13719 tree pvar = DECL_VALUE_EXPR (var);
13720 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13721 pvar = TREE_OPERAND (pvar, 0);
13722 gcc_assert (DECL_P (pvar));
13723 tree new_pvar = lookup_decl (pvar, ctx);
13724 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13725 tree al = size_int (DECL_ALIGN (var));
13726 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
13727 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13728 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
13729 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13730 gimple_seq_add_stmt (&new_body,
13731 gimple_build_assign (new_pvar, x));
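/* Sketch: a variable-sized private is materialized with
   __builtin_alloca_with_align (size, DECL_ALIGN (var)), the result
   being stored into the remapped pointer that underlies the VLA's
   DECL_VALUE_EXPR.  */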
13733 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
13735 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13736 tree new_var = lookup_decl (var, ctx);
13737 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13738 if (TREE_CONSTANT (x))
13739 break;
13740 else
13742 tree atmp
13743 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13744 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
13745 tree al = size_int (TYPE_ALIGN (rtype));
13746 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13749 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13750 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13751 gimple_seq_add_stmt (&new_body,
13752 gimple_build_assign (new_var, x));
13754 break;
13757 gimple_seq fork_seq = NULL;
13758 gimple_seq join_seq = NULL;
13760 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
13762 /* If there are reductions on the offloaded region itself, treat
13763 them as a dummy GANG loop. */
13764 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
13766 gcall *private_marker = lower_oacc_private_marker (ctx);
13768 if (private_marker)
13769 gimple_call_set_arg (private_marker, 2, level);
13771 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
13772 false, NULL, private_marker, NULL, &fork_seq,
13773 &join_seq, ctx);
13776 gimple_seq_add_seq (&new_body, fork_seq);
13777 gimple_seq_add_seq (&new_body, tgt_body);
13778 gimple_seq_add_seq (&new_body, join_seq);
13780 if (offloaded)
13782 new_body = maybe_catch_exception (new_body);
13783 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
13785 gimple_omp_set_body (stmt, new_body);
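/* Final assembly, a sketch of the resulting nesting:

     dep_bind          (only with depend/in_reduction clauses)
       dep_ilist
       bind { ilist; the GIMPLE_OMP_TARGET stmt; olist }
       dep_olist  */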
13788 bind = gimple_build_bind (NULL, NULL,
13789 tgt_bind ? gimple_bind_block (tgt_bind)
13790 : NULL_TREE);
13791 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
13792 gimple_bind_add_seq (bind, ilist);
13793 gimple_bind_add_stmt (bind, stmt);
13794 gimple_bind_add_seq (bind, olist);
13796 pop_gimplify_context (NULL);
13798 if (dep_bind)
13800 gimple_bind_add_seq (dep_bind, dep_ilist);
13801 gimple_bind_add_stmt (dep_bind, bind);
13802 gimple_bind_add_seq (dep_bind, dep_olist);
13803 pop_gimplify_context (dep_bind);
13807 /* Lower code for an OpenMP teams directive. */
13809 static void
13810 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
13812 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
13813 push_gimplify_context ();
13815 tree block = make_node (BLOCK);
13816 gbind *bind = gimple_build_bind (NULL, NULL, block);
13817 gsi_replace (gsi_p, bind, true);
13818 gimple_seq bind_body = NULL;
13819 gimple_seq dlist = NULL;
13820 gimple_seq olist = NULL;
13822 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13823 OMP_CLAUSE_NUM_TEAMS);
13824 if (num_teams == NULL_TREE)
13825 num_teams = build_int_cst (unsigned_type_node, 0);
13826 else
13828 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
13829 num_teams = fold_convert (unsigned_type_node, num_teams);
13830 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
13832 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13833 OMP_CLAUSE_THREAD_LIMIT);
13834 if (thread_limit == NULL_TREE)
13835 thread_limit = build_int_cst (unsigned_type_node, 0);
13836 else
13838 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
13839 thread_limit = fold_convert (unsigned_type_node, thread_limit);
13840 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
13841 fb_rvalue);
13844 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
13845 &bind_body, &dlist, ctx, NULL);
13846 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
13847 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
13848 NULL, ctx);
13849 gimple_seq_add_stmt (&bind_body, teams_stmt);
13851 location_t loc = gimple_location (teams_stmt);
13852 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
13853 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
13854 gimple_set_location (call, loc);
13855 gimple_seq_add_stmt (&bind_body, call);
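/* Sketch: '#pragma omp teams num_teams (4) thread_limit (8)' yields
   roughly '__builtin_GOMP_teams (4, 8)' ahead of the teams body; an
   absent clause passes 0, i.e. an implementation-defined default.  */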
13857 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
13858 gimple_omp_set_body (teams_stmt, NULL);
13859 gimple_seq_add_seq (&bind_body, olist);
13860 gimple_seq_add_seq (&bind_body, dlist);
13861 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
13862 gimple_bind_set_body (bind, bind_body);
13864 pop_gimplify_context (bind);
13866 gimple_bind_append_vars (bind, ctx->block_vars);
13867 BLOCK_VARS (block) = ctx->block_vars;
13868 if (BLOCK_VARS (block))
13869 TREE_USED (block) = 1;
13872 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
13873 regimplified. If DATA is non-NULL, lower_omp_1 is outside
13874 of OMP context, but with task_shared_vars set. */
13876 static tree
13877 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
13878 void *data)
13880 tree t = *tp;
13882 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
13883 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
13884 && data == NULL
13885 && DECL_HAS_VALUE_EXPR_P (t))
13886 return t;
13888 if (task_shared_vars
13889 && DECL_P (t)
13890 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
13891 return t;
13893 /* If a global variable has been privatized, TREE_CONSTANT on
13894 ADDR_EXPR might be wrong. */
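/* E.g., '&g' is TREE_CONSTANT while 'g' is a global, but once 'g' has
   been privatized the address denotes a local copy and is not.  */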
13895 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
13896 recompute_tree_invariant_for_addr_expr (t);
13898 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
13899 return NULL_TREE;
13902 /* Data to be communicated between lower_omp_regimplify_operands and
13903 lower_omp_regimplify_operands_p. */
13905 struct lower_omp_regimplify_operands_data
13907 omp_context *ctx;
13908 vec<tree> *decls;
13911 /* Helper function for lower_omp_regimplify_operands. Find
13912 omp_member_access_dummy_var vars and temporarily adjust their
13913 DECL_VALUE_EXPRs if needed. */
13915 static tree
13916 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
13917 void *data)
13919 tree t = omp_member_access_dummy_var (*tp);
13920 if (t)
13922 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
13923 lower_omp_regimplify_operands_data *ldata
13924 = (lower_omp_regimplify_operands_data *) wi->info;
13925 tree o = maybe_lookup_decl (t, ldata->ctx);
13926 if (o != t)
13928 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
13929 ldata->decls->safe_push (*tp);
13930 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
13931 SET_DECL_VALUE_EXPR (*tp, v);
13934 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
13935 return NULL_TREE;
13938 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
13939 of omp_member_access_dummy_var vars during regimplification. */
13941 static void
13942 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
13943 gimple_stmt_iterator *gsi_p)
13945 auto_vec<tree, 10> decls;
13946 if (ctx)
13948 struct walk_stmt_info wi;
13949 memset (&wi, '\0', sizeof (wi));
13950 struct lower_omp_regimplify_operands_data data;
13951 data.ctx = ctx;
13952 data.decls = &decls;
13953 wi.info = &data;
13954 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
13956 gimple_regimplify_operands (stmt, gsi_p);
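/* The vector holds (value-expr, decl) pairs pushed by the walk
   callback above; pop them back off and restore the original
   DECL_VALUE_EXPRs.  */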
13957 while (!decls.is_empty ())
13959 tree t = decls.pop ();
13960 tree v = decls.pop ();
13961 SET_DECL_VALUE_EXPR (t, v);
13965 static void
13966 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
13968 gimple *stmt = gsi_stmt (*gsi_p);
13969 struct walk_stmt_info wi;
13970 gcall *call_stmt;
13972 if (gimple_has_location (stmt))
13973 input_location = gimple_location (stmt);
13975 if (task_shared_vars)
13976 memset (&wi, '\0', sizeof (wi));
13978 /* If we have issued syntax errors, avoid doing any heavy lifting.
13979 Just replace the OMP directives with a NOP to avoid
13980 confusing RTL expansion. */
13981 if (seen_error () && is_gimple_omp (stmt))
13983 gsi_replace (gsi_p, gimple_build_nop (), true);
13984 return;
13987 switch (gimple_code (stmt))
13989 case GIMPLE_COND:
13991 gcond *cond_stmt = as_a <gcond *> (stmt);
13992 if ((ctx || task_shared_vars)
13993 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
13994 lower_omp_regimplify_p,
13995 ctx ? NULL : &wi, NULL)
13996 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
13997 lower_omp_regimplify_p,
13998 ctx ? NULL : &wi, NULL)))
13999 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
14001 break;
14002 case GIMPLE_CATCH:
14003 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
14004 break;
14005 case GIMPLE_EH_FILTER:
14006 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
14007 break;
14008 case GIMPLE_TRY:
14009 lower_omp (gimple_try_eval_ptr (stmt), ctx);
14010 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
14011 break;
14012 case GIMPLE_TRANSACTION:
14013 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
14014 ctx);
14015 break;
14016 case GIMPLE_BIND:
14017 if (ctx && is_gimple_omp_oacc (ctx->stmt))
14019 tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
14020 oacc_privatization_scan_decl_chain (ctx, vars);
14022 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
14023 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
14024 break;
14025 case GIMPLE_OMP_PARALLEL:
14026 case GIMPLE_OMP_TASK:
14027 ctx = maybe_lookup_ctx (stmt);
14028 gcc_assert (ctx);
14029 if (ctx->cancellable)
14030 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14031 lower_omp_taskreg (gsi_p, ctx);
14032 break;
14033 case GIMPLE_OMP_FOR:
14034 ctx = maybe_lookup_ctx (stmt);
14035 gcc_assert (ctx);
14036 if (ctx->cancellable)
14037 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14038 lower_omp_for (gsi_p, ctx);
14039 break;
14040 case GIMPLE_OMP_SECTIONS:
14041 ctx = maybe_lookup_ctx (stmt);
14042 gcc_assert (ctx);
14043 if (ctx->cancellable)
14044 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14045 lower_omp_sections (gsi_p, ctx);
14046 break;
14047 case GIMPLE_OMP_SCOPE:
14048 ctx = maybe_lookup_ctx (stmt);
14049 gcc_assert (ctx);
14050 lower_omp_scope (gsi_p, ctx);
14051 break;
14052 case GIMPLE_OMP_SINGLE:
14053 ctx = maybe_lookup_ctx (stmt);
14054 gcc_assert (ctx);
14055 lower_omp_single (gsi_p, ctx);
14056 break;
14057 case GIMPLE_OMP_MASTER:
14058 case GIMPLE_OMP_MASKED:
14059 ctx = maybe_lookup_ctx (stmt);
14060 gcc_assert (ctx);
14061 lower_omp_master (gsi_p, ctx);
14062 break;
14063 case GIMPLE_OMP_TASKGROUP:
14064 ctx = maybe_lookup_ctx (stmt);
14065 gcc_assert (ctx);
14066 lower_omp_taskgroup (gsi_p, ctx);
14067 break;
14068 case GIMPLE_OMP_ORDERED:
14069 ctx = maybe_lookup_ctx (stmt);
14070 gcc_assert (ctx);
14071 lower_omp_ordered (gsi_p, ctx);
14072 break;
14073 case GIMPLE_OMP_SCAN:
14074 ctx = maybe_lookup_ctx (stmt);
14075 gcc_assert (ctx);
14076 lower_omp_scan (gsi_p, ctx);
14077 break;
14078 case GIMPLE_OMP_CRITICAL:
14079 ctx = maybe_lookup_ctx (stmt);
14080 gcc_assert (ctx);
14081 lower_omp_critical (gsi_p, ctx);
14082 break;
14083 case GIMPLE_OMP_ATOMIC_LOAD:
14084 if ((ctx || task_shared_vars)
14085 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14086 as_a <gomp_atomic_load *> (stmt)),
14087 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
14088 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14089 break;
14090 case GIMPLE_OMP_TARGET:
14091 ctx = maybe_lookup_ctx (stmt);
14092 gcc_assert (ctx);
14093 lower_omp_target (gsi_p, ctx);
14094 break;
14095 case GIMPLE_OMP_TEAMS:
14096 ctx = maybe_lookup_ctx (stmt);
14097 gcc_assert (ctx);
14098 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
14099 lower_omp_taskreg (gsi_p, ctx);
14100 else
14101 lower_omp_teams (gsi_p, ctx);
14102 break;
14103 case GIMPLE_CALL:
14104 tree fndecl;
14105 call_stmt = as_a <gcall *> (stmt);
14106 fndecl = gimple_call_fndecl (call_stmt);
14107 if (fndecl
14108 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14109 switch (DECL_FUNCTION_CODE (fndecl))
14111 case BUILT_IN_GOMP_BARRIER:
14112 if (ctx == NULL)
14113 break;
14114 /* FALLTHRU */
14115 case BUILT_IN_GOMP_CANCEL:
14116 case BUILT_IN_GOMP_CANCELLATION_POINT:
14117 omp_context *cctx;
14118 cctx = ctx;
14119 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
14120 cctx = cctx->outer;
14121 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
14122 if (!cctx->cancellable)
14124 if (DECL_FUNCTION_CODE (fndecl)
14125 == BUILT_IN_GOMP_CANCELLATION_POINT)
14127 stmt = gimple_build_nop ();
14128 gsi_replace (gsi_p, stmt, false);
14130 break;
14132 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
14134 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
14135 gimple_call_set_fndecl (call_stmt, fndecl);
14136 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
14138 tree lhs;
14139 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
14140 gimple_call_set_lhs (call_stmt, lhs);
14141 tree fallthru_label;
14142 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
14143 gimple *g;
14144 g = gimple_build_label (fallthru_label);
14145 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14146 g = gimple_build_cond (NE_EXPR, lhs,
14147 fold_convert (TREE_TYPE (lhs),
14148 boolean_false_node),
14149 cctx->cancel_label, fallthru_label);
14150 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14151 break;
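/* A sketch of the rewrite just performed for a cancellable region:

     GOMP_barrier ();

   becomes

     lhs = GOMP_barrier_cancel ();
     if (lhs != 0) goto <cancel_label>; else goto <fallthru_label>;  */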
14152 default:
14153 break;
14155 goto regimplify;
14157 case GIMPLE_ASSIGN:
14158 for (omp_context *up = ctx; up; up = up->outer)
14160 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
14161 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
14162 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
14163 || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
14164 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
14165 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
14166 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
14167 && (gimple_omp_target_kind (up->stmt)
14168 == GF_OMP_TARGET_KIND_DATA)))
14169 continue;
14170 else if (!up->lastprivate_conditional_map)
14171 break;
14172 tree lhs = get_base_address (gimple_assign_lhs (stmt));
14173 if (TREE_CODE (lhs) == MEM_REF
14174 && DECL_P (TREE_OPERAND (lhs, 0))
14175 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
14176 0))) == REFERENCE_TYPE)
14177 lhs = TREE_OPERAND (lhs, 0);
14178 if (DECL_P (lhs))
14179 if (tree *v = up->lastprivate_conditional_map->get (lhs))
14181 tree clauses;
14182 if (up->combined_into_simd_safelen1)
14184 up = up->outer;
14185 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
14186 up = up->outer;
14188 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
14189 clauses = gimple_omp_for_clauses (up->stmt);
14190 else
14191 clauses = gimple_omp_sections_clauses (up->stmt);
14192 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
14193 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
14194 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14195 OMP_CLAUSE__CONDTEMP_);
14196 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
14197 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
14198 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14201 /* FALLTHRU */
14203 default:
14204 regimplify:
14205 if ((ctx || task_shared_vars)
14206 && walk_gimple_op (stmt, lower_omp_regimplify_p,
14207 ctx ? NULL : &wi))
14209 /* Just remove clobbers. This should happen only if we have
14210 "privatized" local addressable variables in SIMD regions;
14211 the clobber isn't needed in that case, and gimplifying the address
14212 of the ARRAY_REF into a pointer and creating a MEM_REF-based
14213 clobber would create worse code than we get with the clobber
14214 dropped. */
14215 if (gimple_clobber_p (stmt))
14217 gsi_replace (gsi_p, gimple_build_nop (), true);
14218 break;
14220 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14222 break;
14226 static void
14227 lower_omp (gimple_seq *body, omp_context *ctx)
14229 location_t saved_location = input_location;
14230 gimple_stmt_iterator gsi;
14231 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14232 lower_omp_1 (&gsi, ctx);
14233 /* During gimplification, we haven't folded statements inside offloading
14234 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
14235 if (target_nesting_level || taskreg_nesting_level)
14236 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14237 fold_stmt (&gsi);
14238 input_location = saved_location;
14241 /* Main entry point. */
14243 static unsigned int
14244 execute_lower_omp (void)
14246 gimple_seq body;
14247 int i;
14248 omp_context *ctx;
14250 /* This pass always runs, to provide PROP_gimple_lomp.
14251 But often, there is nothing to do. */
14252 if (flag_openacc == 0 && flag_openmp == 0
14253 && flag_openmp_simd == 0)
14254 return 0;
14256 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
14257 delete_omp_context);
14259 body = gimple_body (current_function_decl);
14261 scan_omp (&body, NULL);
14262 gcc_assert (taskreg_nesting_level == 0);
14263 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
14264 finish_taskreg_scan (ctx);
14265 taskreg_contexts.release ();
14267 if (all_contexts->root)
14269 if (task_shared_vars)
14270 push_gimplify_context ();
14271 lower_omp (&body, NULL);
14272 if (task_shared_vars)
14273 pop_gimplify_context (NULL);
14276 if (all_contexts)
14278 splay_tree_delete (all_contexts);
14279 all_contexts = NULL;
14281 BITMAP_FREE (task_shared_vars);
14282 BITMAP_FREE (global_nonaddressable_vars);
14284 /* If the current function is a method, remove the artificial dummy VAR_DECLs
14285 created for non-static data member privatization; they aren't needed for
14286 debuginfo or anything else, have already been replaced everywhere in the
14287 IL, and cause problems with LTO. */
14288 if (DECL_ARGUMENTS (current_function_decl)
14289 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
14290 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
14291 == POINTER_TYPE))
14292 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
14293 return 0;
14296 namespace {
14298 const pass_data pass_data_lower_omp =
14300 GIMPLE_PASS, /* type */
14301 "omplower", /* name */
14302 OPTGROUP_OMP, /* optinfo_flags */
14303 TV_NONE, /* tv_id */
14304 PROP_gimple_any, /* properties_required */
14305 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
14306 0, /* properties_destroyed */
14307 0, /* todo_flags_start */
14308 0, /* todo_flags_finish */
14311 class pass_lower_omp : public gimple_opt_pass
14313 public:
14314 pass_lower_omp (gcc::context *ctxt)
14315 : gimple_opt_pass (pass_data_lower_omp, ctxt)
14318 /* opt_pass methods: */
14319 virtual unsigned int execute (function *) { return execute_lower_omp (); }
14321 }; // class pass_lower_omp
14323 } // anon namespace
14325 gimple_opt_pass *
14326 make_pass_lower_omp (gcc::context *ctxt)
14328 return new pass_lower_omp (ctxt);
14331 /* The following is a utility to diagnose structured block violations.
14332 It is not part of the "omplower" pass, as that's invoked too late. It
14333 should be invoked by the respective front ends after gimplification. */
14335 static splay_tree all_labels;
14337 /* Check for mismatched contexts and generate an error if needed. Return
14338 true if an error is detected. */
14340 static bool
14341 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
14342 gimple *branch_ctx, gimple *label_ctx)
14344 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
14345 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
14347 if (label_ctx == branch_ctx)
14348 return false;
14350 const char* kind = NULL;
14352 if (flag_openacc)
14354 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
14355 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
14357 gcc_checking_assert (kind == NULL);
14358 kind = "OpenACC";
14361 if (kind == NULL)
14363 gcc_checking_assert (flag_openmp || flag_openmp_simd);
14364 kind = "OpenMP";
14367 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14368 so we could traverse it and issue a correct "exit" or "enter" error
14369 message upon a structured block violation.
14371 We built the context by tree_cons'ing a list, but there is
14372 no easy counterpart in gimple tuples. It seems like far too much work
14373 for issuing exit/enter error messages. If someone really misses the
14374 distinct error message... patches welcome. */
14376 #if 0
14377 /* Try to avoid confusing the user by producing an error message
14378 with correct "exit" or "enter" verbiage. We prefer "exit"
14379 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14380 if (branch_ctx == NULL)
14381 exit_p = false;
14382 else
14384 while (label_ctx)
14386 if (TREE_VALUE (label_ctx) == branch_ctx)
14388 exit_p = false;
14389 break;
14391 label_ctx = TREE_CHAIN (label_ctx);
14395 if (exit_p)
14396 error ("invalid exit from %s structured block", kind);
14397 else
14398 error ("invalid entry to %s structured block", kind);
14399 #endif

  /* If it's obvious we have an invalid entry, be specific about the error:
     a NULL BRANCH_CTX means the branch itself is outside every OMP
     construct, so the mismatch can only be a jump into the label's block.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
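
/* For exposition: when both contexts are non-NULL but differ, only the
   vaguer wording above is used, e.g. for a jump between two sibling
   SECTION blocks (illustrative sketch, not exercised by this file):

       #pragma omp sections
       {
	 #pragma omp section
	 { goto lab; }			// error: invalid branch to/from
	 #pragma omp section		//        OpenMP structured block
	 { lab:; }
       }
 */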

/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
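
/* For exposition, what pass 1 records for a nested example (illustrative
   sketch, not exercised by this file): each label maps to its innermost
   enclosing OMP statement, or to a NULL context when outside all of them:

       l0:;			// all_labels[l0] -> NULL
       #pragma omp parallel
       {
	 l1:;			// all_labels[l1] -> GIMPLE_OMP_PARALLEL
	 #pragma omp critical
	 {
	   l2:;			// all_labels[l2] -> GIMPLE_OMP_CRITICAL
	 }
       }
 */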

/* Pass 2: Check each branch and see if its context differs from that
   of the destination label.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
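
/* For exposition: the GIMPLE_RETURN case above passes a NULL label
   context, so a return statement that is still inside an OMP construct
   at this point is rejected with the generic wording (illustrative
   sketch, not exercised by this file):

       #pragma omp critical
       {
	 return;		// error: invalid branch to/from
       }			//        OpenMP structured block
 */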

static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}
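
/* For exposition: the labels are collected in a complete first walk
   (pass 1) before any branch is checked (pass 2) because a branch may
   precede its target in the statement sequence (illustrative sketch,
   not exercised by this file):

       goto fwd;		// checked in pass 2, after FWD was
       #pragma omp single	// already recorded by pass 1
       {
	 fwd:;
       }
 */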

namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}

#include "gt-omp-low.h"