[Ada] Add adequate guard before calling First_Rep_Item
[official-gcc.git] / gcc / omp-low.c
blob26c5c0261e93c6657f81d1f8664523b246fa3663
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2021 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "omp-offload.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
78 struct omp_context
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
84 copy_body_data cb;
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context *outer;
88 gimple *stmt;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
92 splay_tree field_map;
93 tree record_type;
94 tree sender_decl;
95 tree receiver_decl;
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map;
103 tree srecord_type;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
107 tree block_vars;
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
111 tree cancel_label;
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
114 otherwise. */
115 gimple *simt_stmt;
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec<tree> task_reductions;
122 /* A hash map from the reduction clauses to the registered array
123 elts. */
124 hash_map<tree, unsigned> *task_reduction_map;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map<tree, tree> *lastprivate_conditional_map;
130 /* And a hash map from the allocate variables to their corresponding
131 allocators. */
132 hash_map<tree, tree> *allocate_map;
134 /* A tree_list of the reduction clauses in this context. This is
135 only used for checking the consistency of OpenACC reduction
136 clauses in scan_omp_for and is not guaranteed to contain a valid
137 value outside of this function. */
138 tree local_reduction_clauses;
140 /* A tree_list of the reduction clauses in outer contexts. This is
141 only used for checking the consistency of OpenACC reduction
142 clauses in scan_omp_for and is not guaranteed to contain a valid
143 value outside of this function. */
144 tree outer_reduction_clauses;
146 /* Nesting depth of this context. Used to beautify error messages re
147 invalid gotos. The outermost ctx is depth 1, with depth 0 being
148 reserved for the main body of the function. */
149 int depth;
151 /* True if this parallel directive is nested within another. */
152 bool is_nested;
154 /* True if this construct can be cancelled. */
155 bool cancellable;
157 /* True if lower_omp_1 should look up lastprivate conditional in parent
158 context. */
159 bool combined_into_simd_safelen1;
161 /* True if there is nested scan context with inclusive clause. */
162 bool scan_inclusive;
164 /* True if there is nested scan context with exclusive clause. */
165 bool scan_exclusive;
167 /* True in the second simd loop of for simd with inscan reductions. */
168 bool for_simd_scan_phase;
170 /* True if there is order(concurrent) clause on the construct. */
171 bool order_concurrent;
173 /* True if there is bind clause on the construct (i.e. a loop construct). */
174 bool loop_p;
176 /* Only used for omp target contexts. True if a teams construct is
177 strictly nested in it. */
178 bool teams_nested_p;
180 /* Only used for omp target contexts. True if an OpenMP construct other
181 than teams is strictly nested in it. */
182 bool nonteams_nested_p;
184 /* Candidates for adjusting OpenACC privatization level. */
185 vec<tree> oacc_privatization_candidates;
188 static splay_tree all_contexts;
189 static int taskreg_nesting_level;
190 static int target_nesting_level;
191 static bitmap task_shared_vars;
192 static bitmap global_nonaddressable_vars;
193 static vec<omp_context *> taskreg_contexts;
195 static void scan_omp (gimple_seq *, omp_context *);
196 static tree scan_omp_1_op (tree *, int *, void *);
198 #define WALK_SUBSTMTS \
199 case GIMPLE_BIND: \
200 case GIMPLE_TRY: \
201 case GIMPLE_CATCH: \
202 case GIMPLE_EH_FILTER: \
203 case GIMPLE_TRANSACTION: \
204 /* The sub-statements for these should be walked. */ \
205 *handled_ops_p = false; \
206 break;
208 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
209 (This doesn't include OpenACC 'kernels' decomposed parts.) */
211 static bool
212 is_oacc_parallel_or_serial (omp_context *ctx)
214 enum gimple_code outer_type = gimple_code (ctx->stmt);
215 return ((outer_type == GIMPLE_OMP_TARGET)
216 && ((gimple_omp_target_kind (ctx->stmt)
217 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
218 || (gimple_omp_target_kind (ctx->stmt)
219 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
222 /* Return whether CTX represents an OpenACC 'kernels' construct.
223 (This doesn't include OpenACC 'kernels' decomposed parts.) */
225 static bool
226 is_oacc_kernels (omp_context *ctx)
228 enum gimple_code outer_type = gimple_code (ctx->stmt);
229 return ((outer_type == GIMPLE_OMP_TARGET)
230 && (gimple_omp_target_kind (ctx->stmt)
231 == GF_OMP_TARGET_KIND_OACC_KERNELS));
234 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
236 static bool
237 is_oacc_kernels_decomposed_part (omp_context *ctx)
239 enum gimple_code outer_type = gimple_code (ctx->stmt);
240 return ((outer_type == GIMPLE_OMP_TARGET)
241 && ((gimple_omp_target_kind (ctx->stmt)
242 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
243 || (gimple_omp_target_kind (ctx->stmt)
244 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
245 || (gimple_omp_target_kind (ctx->stmt)
246 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
249 /* Return true if STMT corresponds to an OpenMP target region. */
250 static bool
251 is_omp_target (gimple *stmt)
253 if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
255 int kind = gimple_omp_target_kind (stmt);
256 return (kind == GF_OMP_TARGET_KIND_REGION
257 || kind == GF_OMP_TARGET_KIND_DATA
258 || kind == GF_OMP_TARGET_KIND_ENTER_DATA
259 || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
261 return false;
264 /* If DECL is the artificial dummy VAR_DECL created for non-static
265 data member privatization, return the underlying "this" parameter,
266 otherwise return NULL. */
268 tree
269 omp_member_access_dummy_var (tree decl)
271 if (!VAR_P (decl)
272 || !DECL_ARTIFICIAL (decl)
273 || !DECL_IGNORED_P (decl)
274 || !DECL_HAS_VALUE_EXPR_P (decl)
275 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
276 return NULL_TREE;
278 tree v = DECL_VALUE_EXPR (decl);
279 if (TREE_CODE (v) != COMPONENT_REF)
280 return NULL_TREE;
282 while (1)
283 switch (TREE_CODE (v))
285 case COMPONENT_REF:
286 case MEM_REF:
287 case INDIRECT_REF:
288 CASE_CONVERT:
289 case POINTER_PLUS_EXPR:
290 v = TREE_OPERAND (v, 0);
291 continue;
292 case PARM_DECL:
293 if (DECL_CONTEXT (v) == current_function_decl
294 && DECL_ARTIFICIAL (v)
295 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
296 return v;
297 return NULL_TREE;
298 default:
299 return NULL_TREE;
303 /* Helper for unshare_and_remap, called through walk_tree. */
305 static tree
306 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
308 tree *pair = (tree *) data;
309 if (*tp == pair[0])
311 *tp = unshare_expr (pair[1]);
312 *walk_subtrees = 0;
314 else if (IS_TYPE_OR_DECL_P (*tp))
315 *walk_subtrees = 0;
316 return NULL_TREE;
319 /* Return unshare_expr (X) with all occurrences of FROM
320 replaced with TO. */
322 static tree
323 unshare_and_remap (tree x, tree from, tree to)
325 tree pair[2] = { from, to };
326 x = unshare_expr (x);
327 walk_tree (&x, unshare_and_remap_1, pair, NULL);
328 return x;
331 /* Convenience function for calling scan_omp_1_op on tree operands. */
333 static inline tree
334 scan_omp_op (tree *tp, omp_context *ctx)
336 struct walk_stmt_info wi;
338 memset (&wi, 0, sizeof (wi));
339 wi.info = ctx;
340 wi.want_locations = true;
342 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
345 static void lower_omp (gimple_seq *, omp_context *);
346 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
347 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
349 /* Return true if CTX is for an omp parallel. */
351 static inline bool
352 is_parallel_ctx (omp_context *ctx)
354 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
358 /* Return true if CTX is for an omp task. */
360 static inline bool
361 is_task_ctx (omp_context *ctx)
363 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
367 /* Return true if CTX is for an omp taskloop. */
369 static inline bool
370 is_taskloop_ctx (omp_context *ctx)
372 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
373 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
377 /* Return true if CTX is for a host omp teams. */
379 static inline bool
380 is_host_teams_ctx (omp_context *ctx)
382 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
383 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
386 /* Return true if CTX is for an omp parallel or omp task or host omp teams
387 (the last one is strictly not a task region in OpenMP speak, but we
388 need to treat it similarly). */
390 static inline bool
391 is_taskreg_ctx (omp_context *ctx)
393 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
396 /* Return true if EXPR is variable sized. */
398 static inline bool
399 is_variable_sized (const_tree expr)
401 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
404 /* Lookup variables. The "maybe" form
405 allows for the variable form to not have been entered, otherwise we
406 assert that the variable must have been entered. */
408 static inline tree
409 lookup_decl (tree var, omp_context *ctx)
411 tree *n = ctx->cb.decl_map->get (var);
412 return *n;
415 static inline tree
416 maybe_lookup_decl (const_tree var, omp_context *ctx)
418 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
419 return n ? *n : NULL_TREE;
422 static inline tree
423 lookup_field (tree var, omp_context *ctx)
425 splay_tree_node n;
426 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
427 return (tree) n->value;
430 static inline tree
431 lookup_sfield (splay_tree_key key, omp_context *ctx)
433 splay_tree_node n;
434 n = splay_tree_lookup (ctx->sfield_map
435 ? ctx->sfield_map : ctx->field_map, key);
436 return (tree) n->value;
439 static inline tree
440 lookup_sfield (tree var, omp_context *ctx)
442 return lookup_sfield ((splay_tree_key) var, ctx);
445 static inline tree
446 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
448 splay_tree_node n;
449 n = splay_tree_lookup (ctx->field_map, key);
450 return n ? (tree) n->value : NULL_TREE;
453 static inline tree
454 maybe_lookup_field (tree var, omp_context *ctx)
456 return maybe_lookup_field ((splay_tree_key) var, ctx);
459 /* Return true if DECL should be copied by pointer. SHARED_CTX is
460 the parallel context if DECL is to be shared. */
462 static bool
463 use_pointer_for_field (tree decl, omp_context *shared_ctx)
465 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
466 || TYPE_ATOMIC (TREE_TYPE (decl)))
467 return true;
469 /* We can only use copy-in/copy-out semantics for shared variables
470 when we know the value is not accessible from an outer scope. */
471 if (shared_ctx)
473 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
475 /* ??? Trivially accessible from anywhere. But why would we even
476 be passing an address in this case? Should we simply assert
477 this to be false, or should we have a cleanup pass that removes
478 these from the list of mappings? */
479 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
480 return true;
482 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
483 without analyzing the expression whether or not its location
484 is accessible to anyone else. In the case of nested parallel
485 regions it certainly may be. */
486 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
487 return true;
489 /* Do not use copy-in/copy-out for variables that have their
490 address taken. */
491 if (is_global_var (decl))
493 /* For file scope vars, track whether we've seen them as
494 non-addressable initially and in that case, keep the same
495 answer for the duration of the pass, even when they are made
496 addressable later on e.g. through reduction expansion. Global
497 variables which weren't addressable before the pass will not
498 have their privatized copies address taken. See PR91216. */
499 if (!TREE_ADDRESSABLE (decl))
501 if (!global_nonaddressable_vars)
502 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
503 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
505 else if (!global_nonaddressable_vars
506 || !bitmap_bit_p (global_nonaddressable_vars,
507 DECL_UID (decl)))
508 return true;
510 else if (TREE_ADDRESSABLE (decl))
511 return true;
513 /* lower_send_shared_vars only uses copy-in, but not copy-out
514 for these. */
515 if (TREE_READONLY (decl)
516 || ((TREE_CODE (decl) == RESULT_DECL
517 || TREE_CODE (decl) == PARM_DECL)
518 && DECL_BY_REFERENCE (decl)))
519 return false;
521 /* Disallow copy-in/out in nested parallel if
522 decl is shared in outer parallel, otherwise
523 each thread could store the shared variable
524 in its own copy-in location, making the
525 variable no longer really shared. */
526 if (shared_ctx->is_nested)
528 omp_context *up;
530 for (up = shared_ctx->outer; up; up = up->outer)
531 if ((is_taskreg_ctx (up)
532 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
533 && is_gimple_omp_offloaded (up->stmt)))
534 && maybe_lookup_decl (decl, up))
535 break;
537 if (up)
539 tree c;
541 if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
543 for (c = gimple_omp_target_clauses (up->stmt);
544 c; c = OMP_CLAUSE_CHAIN (c))
545 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
546 && OMP_CLAUSE_DECL (c) == decl)
547 break;
549 else
550 for (c = gimple_omp_taskreg_clauses (up->stmt);
551 c; c = OMP_CLAUSE_CHAIN (c))
552 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
553 && OMP_CLAUSE_DECL (c) == decl)
554 break;
556 if (c)
557 goto maybe_mark_addressable_and_ret;
561 /* For tasks avoid using copy-in/out. As tasks can be
562 deferred or executed in different thread, when GOMP_task
563 returns, the task hasn't necessarily terminated. */
564 if (is_task_ctx (shared_ctx))
566 tree outer;
567 maybe_mark_addressable_and_ret:
568 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
569 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
571 /* Taking address of OUTER in lower_send_shared_vars
572 might need regimplification of everything that uses the
573 variable. */
574 if (!task_shared_vars)
575 task_shared_vars = BITMAP_ALLOC (NULL);
576 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
577 TREE_ADDRESSABLE (outer) = 1;
579 return true;
583 return false;
586 /* Construct a new automatic decl similar to VAR. */
588 static tree
589 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
591 tree copy = copy_var_decl (var, name, type);
593 DECL_CONTEXT (copy) = current_function_decl;
594 DECL_CHAIN (copy) = ctx->block_vars;
595 /* If VAR is listed in task_shared_vars, it means it wasn't
596 originally addressable and is just because task needs to take
597 it's address. But we don't need to take address of privatizations
598 from that var. */
599 if (TREE_ADDRESSABLE (var)
600 && ((task_shared_vars
601 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
602 || (global_nonaddressable_vars
603 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
604 TREE_ADDRESSABLE (copy) = 0;
605 ctx->block_vars = copy;
607 return copy;
610 static tree
611 omp_copy_decl_1 (tree var, omp_context *ctx)
613 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
616 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
617 as appropriate. */
618 /* See also 'gcc/omp-oacc-neuter-broadcast.cc:oacc_build_component_ref'. */
620 static tree
621 omp_build_component_ref (tree obj, tree field)
623 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
624 if (TREE_THIS_VOLATILE (field))
625 TREE_THIS_VOLATILE (ret) |= 1;
626 if (TREE_READONLY (field))
627 TREE_READONLY (ret) |= 1;
628 return ret;
631 /* Build tree nodes to access the field for VAR on the receiver side. */
633 static tree
634 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
636 tree x, field = lookup_field (var, ctx);
638 /* If the receiver record type was remapped in the child function,
639 remap the field into the new record type. */
640 x = maybe_lookup_field (field, ctx);
641 if (x != NULL)
642 field = x;
644 x = build_simple_mem_ref (ctx->receiver_decl);
645 TREE_THIS_NOTRAP (x) = 1;
646 x = omp_build_component_ref (x, field);
647 if (by_ref)
649 x = build_simple_mem_ref (x);
650 TREE_THIS_NOTRAP (x) = 1;
653 return x;
656 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
657 of a parallel, this is a component reference; for workshare constructs
658 this is some variable. */
660 static tree
661 build_outer_var_ref (tree var, omp_context *ctx,
662 enum omp_clause_code code = OMP_CLAUSE_ERROR)
664 tree x;
665 omp_context *outer = ctx->outer;
666 for (; outer; outer = outer->outer)
668 if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
669 continue;
670 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
671 && !maybe_lookup_decl (var, outer))
672 continue;
673 break;
676 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
677 x = var;
678 else if (is_variable_sized (var))
680 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
681 x = build_outer_var_ref (x, ctx, code);
682 x = build_simple_mem_ref (x);
684 else if (is_taskreg_ctx (ctx))
686 bool by_ref = use_pointer_for_field (var, NULL);
687 x = build_receiver_ref (var, by_ref, ctx);
689 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
690 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
691 || ctx->loop_p
692 || (code == OMP_CLAUSE_PRIVATE
693 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
694 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
695 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
697 /* #pragma omp simd isn't a worksharing construct, and can reference
698 even private vars in its linear etc. clauses.
699 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
700 to private vars in all worksharing constructs. */
701 x = NULL_TREE;
702 if (outer && is_taskreg_ctx (outer))
703 x = lookup_decl (var, outer);
704 else if (outer)
705 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
706 if (x == NULL_TREE)
707 x = var;
709 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
711 gcc_assert (outer);
712 splay_tree_node n
713 = splay_tree_lookup (outer->field_map,
714 (splay_tree_key) &DECL_UID (var));
715 if (n == NULL)
717 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
718 x = var;
719 else
720 x = lookup_decl (var, outer);
722 else
724 tree field = (tree) n->value;
725 /* If the receiver record type was remapped in the child function,
726 remap the field into the new record type. */
727 x = maybe_lookup_field (field, outer);
728 if (x != NULL)
729 field = x;
731 x = build_simple_mem_ref (outer->receiver_decl);
732 x = omp_build_component_ref (x, field);
733 if (use_pointer_for_field (var, outer))
734 x = build_simple_mem_ref (x);
737 else if (outer)
738 x = lookup_decl (var, outer);
739 else if (omp_privatize_by_reference (var))
740 /* This can happen with orphaned constructs. If var is reference, it is
741 possible it is shared and as such valid. */
742 x = var;
743 else if (omp_member_access_dummy_var (var))
744 x = var;
745 else
746 gcc_unreachable ();
748 if (x == var)
750 tree t = omp_member_access_dummy_var (var);
751 if (t)
753 x = DECL_VALUE_EXPR (var);
754 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
755 if (o != t)
756 x = unshare_and_remap (x, t, o);
757 else
758 x = unshare_expr (x);
762 if (omp_privatize_by_reference (var))
763 x = build_simple_mem_ref (x);
765 return x;
768 /* Build tree nodes to access the field for VAR on the sender side. */
770 static tree
771 build_sender_ref (splay_tree_key key, omp_context *ctx)
773 tree field = lookup_sfield (key, ctx);
774 return omp_build_component_ref (ctx->sender_decl, field);
777 static tree
778 build_sender_ref (tree var, omp_context *ctx)
780 return build_sender_ref ((splay_tree_key) var, ctx);
783 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
784 BASE_POINTERS_RESTRICT, declare the field with restrict. */
786 static void
787 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
789 tree field, type, sfield = NULL_TREE;
790 splay_tree_key key = (splay_tree_key) var;
792 if ((mask & 16) != 0)
794 key = (splay_tree_key) &DECL_NAME (var);
795 gcc_checking_assert (key != (splay_tree_key) var);
797 if ((mask & 8) != 0)
799 key = (splay_tree_key) &DECL_UID (var);
800 gcc_checking_assert (key != (splay_tree_key) var);
802 gcc_assert ((mask & 1) == 0
803 || !splay_tree_lookup (ctx->field_map, key));
804 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
805 || !splay_tree_lookup (ctx->sfield_map, key));
806 gcc_assert ((mask & 3) == 3
807 || !is_gimple_omp_oacc (ctx->stmt));
809 type = TREE_TYPE (var);
810 if ((mask & 16) != 0)
811 type = lang_hooks.decls.omp_array_data (var, true);
813 /* Prevent redeclaring the var in the split-off function with a restrict
814 pointer type. Note that we only clear type itself, restrict qualifiers in
815 the pointed-to type will be ignored by points-to analysis. */
816 if (POINTER_TYPE_P (type)
817 && TYPE_RESTRICT (type))
818 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
820 if (mask & 4)
822 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
823 type = build_pointer_type (build_pointer_type (type));
825 else if (by_ref)
826 type = build_pointer_type (type);
827 else if ((mask & (32 | 3)) == 1
828 && omp_privatize_by_reference (var))
829 type = TREE_TYPE (type);
831 field = build_decl (DECL_SOURCE_LOCATION (var),
832 FIELD_DECL, DECL_NAME (var), type);
834 /* Remember what variable this field was created for. This does have a
835 side effect of making dwarf2out ignore this member, so for helpful
836 debugging we clear it later in delete_omp_context. */
837 DECL_ABSTRACT_ORIGIN (field) = var;
838 if ((mask & 16) == 0 && type == TREE_TYPE (var))
840 SET_DECL_ALIGN (field, DECL_ALIGN (var));
841 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
842 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
844 else
845 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
847 if ((mask & 3) == 3)
849 insert_field_into_struct (ctx->record_type, field);
850 if (ctx->srecord_type)
852 sfield = build_decl (DECL_SOURCE_LOCATION (var),
853 FIELD_DECL, DECL_NAME (var), type);
854 DECL_ABSTRACT_ORIGIN (sfield) = var;
855 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
856 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
857 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
858 insert_field_into_struct (ctx->srecord_type, sfield);
861 else
863 if (ctx->srecord_type == NULL_TREE)
865 tree t;
867 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
868 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
869 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
871 sfield = build_decl (DECL_SOURCE_LOCATION (t),
872 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
873 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
874 insert_field_into_struct (ctx->srecord_type, sfield);
875 splay_tree_insert (ctx->sfield_map,
876 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
877 (splay_tree_value) sfield);
880 sfield = field;
881 insert_field_into_struct ((mask & 1) ? ctx->record_type
882 : ctx->srecord_type, field);
885 if (mask & 1)
886 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
887 if ((mask & 2) && ctx->sfield_map)
888 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
891 static tree
892 install_var_local (tree var, omp_context *ctx)
894 tree new_var = omp_copy_decl_1 (var, ctx);
895 insert_decl_map (&ctx->cb, var, new_var);
896 return new_var;
899 /* Adjust the replacement for DECL in CTX for the new context. This means
900 copying the DECL_VALUE_EXPR, and fixing up the type. */
902 static void
903 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
905 tree new_decl, size;
907 new_decl = lookup_decl (decl, ctx);
909 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
911 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
912 && DECL_HAS_VALUE_EXPR_P (decl))
914 tree ve = DECL_VALUE_EXPR (decl);
915 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
916 SET_DECL_VALUE_EXPR (new_decl, ve);
917 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
920 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
922 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
923 if (size == error_mark_node)
924 size = TYPE_SIZE (TREE_TYPE (new_decl));
925 DECL_SIZE (new_decl) = size;
927 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
928 if (size == error_mark_node)
929 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
930 DECL_SIZE_UNIT (new_decl) = size;
934 /* The callback for remap_decl. Search all containing contexts for a
935 mapping of the variable; this avoids having to duplicate the splay
936 tree ahead of time. We know a mapping doesn't already exist in the
937 given context. Create new mappings to implement default semantics. */
939 static tree
940 omp_copy_decl (tree var, copy_body_data *cb)
942 omp_context *ctx = (omp_context *) cb;
943 tree new_var;
945 if (TREE_CODE (var) == LABEL_DECL)
947 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
948 return var;
949 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
950 DECL_CONTEXT (new_var) = current_function_decl;
951 insert_decl_map (&ctx->cb, var, new_var);
952 return new_var;
955 while (!is_taskreg_ctx (ctx))
957 ctx = ctx->outer;
958 if (ctx == NULL)
959 return var;
960 new_var = maybe_lookup_decl (var, ctx);
961 if (new_var)
962 return new_var;
965 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
966 return var;
968 return error_mark_node;
971 /* Create a new context, with OUTER_CTX being the surrounding context. */
973 static omp_context *
974 new_omp_context (gimple *stmt, omp_context *outer_ctx)
976 omp_context *ctx = XCNEW (omp_context);
978 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
979 (splay_tree_value) ctx);
980 ctx->stmt = stmt;
982 if (outer_ctx)
984 ctx->outer = outer_ctx;
985 ctx->cb = outer_ctx->cb;
986 ctx->cb.block = NULL;
987 ctx->depth = outer_ctx->depth + 1;
989 else
991 ctx->cb.src_fn = current_function_decl;
992 ctx->cb.dst_fn = current_function_decl;
993 ctx->cb.src_node = cgraph_node::get (current_function_decl);
994 gcc_checking_assert (ctx->cb.src_node);
995 ctx->cb.dst_node = ctx->cb.src_node;
996 ctx->cb.src_cfun = cfun;
997 ctx->cb.copy_decl = omp_copy_decl;
998 ctx->cb.eh_lp_nr = 0;
999 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
1000 ctx->cb.adjust_array_error_bounds = true;
1001 ctx->cb.dont_remap_vla_if_no_change = true;
1002 ctx->depth = 1;
1005 ctx->cb.decl_map = new hash_map<tree, tree>;
1007 return ctx;
1010 static gimple_seq maybe_catch_exception (gimple_seq);
1012 /* Finalize task copyfn. */
1014 static void
1015 finalize_task_copyfn (gomp_task *task_stmt)
1017 struct function *child_cfun;
1018 tree child_fn;
1019 gimple_seq seq = NULL, new_seq;
1020 gbind *bind;
1022 child_fn = gimple_omp_task_copy_fn (task_stmt);
1023 if (child_fn == NULL_TREE)
1024 return;
1026 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
1027 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
1029 push_cfun (child_cfun);
1030 bind = gimplify_body (child_fn, false);
1031 gimple_seq_add_stmt (&seq, bind);
1032 new_seq = maybe_catch_exception (seq);
1033 if (new_seq != seq)
1035 bind = gimple_build_bind (NULL, new_seq, NULL);
1036 seq = NULL;
1037 gimple_seq_add_stmt (&seq, bind);
1039 gimple_set_body (child_fn, seq);
1040 pop_cfun ();
1042 /* Inform the callgraph about the new function. */
1043 cgraph_node *node = cgraph_node::get_create (child_fn);
1044 node->parallelized_function = 1;
1045 cgraph_node::add_new_function (child_fn, false);
1048 /* Destroy a omp_context data structures. Called through the splay tree
1049 value delete callback. */
1051 static void
1052 delete_omp_context (splay_tree_value value)
1054 omp_context *ctx = (omp_context *) value;
1056 delete ctx->cb.decl_map;
1058 if (ctx->field_map)
1059 splay_tree_delete (ctx->field_map);
1060 if (ctx->sfield_map)
1061 splay_tree_delete (ctx->sfield_map);
1063 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1064 it produces corrupt debug information. */
1065 if (ctx->record_type)
1067 tree t;
1068 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1069 DECL_ABSTRACT_ORIGIN (t) = NULL;
1071 if (ctx->srecord_type)
1073 tree t;
1074 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1075 DECL_ABSTRACT_ORIGIN (t) = NULL;
1078 if (is_task_ctx (ctx))
1079 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
1081 if (ctx->task_reduction_map)
1083 ctx->task_reductions.release ();
1084 delete ctx->task_reduction_map;
1087 delete ctx->lastprivate_conditional_map;
1088 delete ctx->allocate_map;
1090 XDELETE (ctx);
1093 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1094 context. */
1096 static void
1097 fixup_child_record_type (omp_context *ctx)
1099 tree f, type = ctx->record_type;
1101 if (!ctx->receiver_decl)
1102 return;
1103 /* ??? It isn't sufficient to just call remap_type here, because
1104 variably_modified_type_p doesn't work the way we expect for
1105 record types. Testing each field for whether it needs remapping
1106 and creating a new record by hand works, however. */
1107 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1108 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1109 break;
1110 if (f)
1112 tree name, new_fields = NULL;
1114 type = lang_hooks.types.make_type (RECORD_TYPE);
1115 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1116 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1117 TYPE_DECL, name, type);
1118 TYPE_NAME (type) = name;
1120 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1122 tree new_f = copy_node (f);
1123 DECL_CONTEXT (new_f) = type;
1124 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1125 DECL_CHAIN (new_f) = new_fields;
1126 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1127 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1128 &ctx->cb, NULL);
1129 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1130 &ctx->cb, NULL);
1131 new_fields = new_f;
1133 /* Arrange to be able to look up the receiver field
1134 given the sender field. */
1135 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1136 (splay_tree_value) new_f);
1138 TYPE_FIELDS (type) = nreverse (new_fields);
1139 layout_type (type);
1142 /* In a target region we never modify any of the pointers in *.omp_data_i,
1143 so attempt to help the optimizers. */
1144 if (is_gimple_omp_offloaded (ctx->stmt))
1145 type = build_qualified_type (type, TYPE_QUAL_CONST);
1147 TREE_TYPE (ctx->receiver_decl)
1148 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1151 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1152 specified by CLAUSES. */
1154 static void
1155 scan_sharing_clauses (tree clauses, omp_context *ctx)
1157 tree c, decl;
1158 bool scan_array_reductions = false;
1160 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1161 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
1162 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1163 /* omp_default_mem_alloc is 1 */
1164 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1165 || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
1167 if (ctx->allocate_map == NULL)
1168 ctx->allocate_map = new hash_map<tree, tree>;
1169 tree val = integer_zero_node;
1170 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1171 val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
1172 if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
1173 val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
1174 ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
1177 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1179 bool by_ref;
1181 switch (OMP_CLAUSE_CODE (c))
1183 case OMP_CLAUSE_PRIVATE:
1184 decl = OMP_CLAUSE_DECL (c);
1185 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1186 goto do_private;
1187 else if (!is_variable_sized (decl))
1188 install_var_local (decl, ctx);
1189 break;
1191 case OMP_CLAUSE_SHARED:
1192 decl = OMP_CLAUSE_DECL (c);
1193 if (ctx->allocate_map && ctx->allocate_map->get (decl))
1194 ctx->allocate_map->remove (decl);
1195 /* Ignore shared directives in teams construct inside of
1196 target construct. */
1197 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1198 && !is_host_teams_ctx (ctx))
1200 /* Global variables don't need to be copied,
1201 the receiver side will use them directly. */
1202 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1203 if (is_global_var (odecl))
1204 break;
1205 insert_decl_map (&ctx->cb, decl, odecl);
1206 break;
1208 gcc_assert (is_taskreg_ctx (ctx));
1209 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1210 || !is_variable_sized (decl));
1211 /* Global variables don't need to be copied,
1212 the receiver side will use them directly. */
1213 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1214 break;
1215 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1217 use_pointer_for_field (decl, ctx);
1218 break;
1220 by_ref = use_pointer_for_field (decl, NULL);
1221 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1222 || TREE_ADDRESSABLE (decl)
1223 || by_ref
1224 || omp_privatize_by_reference (decl))
1226 by_ref = use_pointer_for_field (decl, ctx);
1227 install_var_field (decl, by_ref, 3, ctx);
1228 install_var_local (decl, ctx);
1229 break;
1231 /* We don't need to copy const scalar vars back. */
1232 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1233 goto do_private;
1235 case OMP_CLAUSE_REDUCTION:
1236 /* Collect 'reduction' clauses on OpenACC compute construct. */
1237 if (is_gimple_omp_oacc (ctx->stmt)
1238 && is_gimple_omp_offloaded (ctx->stmt))
1240 /* No 'reduction' clauses on OpenACC 'kernels'. */
1241 gcc_checking_assert (!is_oacc_kernels (ctx));
1242 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1243 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
1245 ctx->local_reduction_clauses
1246 = tree_cons (NULL, c, ctx->local_reduction_clauses);
1248 /* FALLTHRU */
1250 case OMP_CLAUSE_IN_REDUCTION:
1251 decl = OMP_CLAUSE_DECL (c);
1252 if (ctx->allocate_map
1253 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1254 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
1255 || OMP_CLAUSE_REDUCTION_TASK (c)))
1256 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1257 || is_task_ctx (ctx)))
1259 /* For now. */
1260 if (ctx->allocate_map->get (decl))
1261 ctx->allocate_map->remove (decl);
1263 if (TREE_CODE (decl) == MEM_REF)
1265 tree t = TREE_OPERAND (decl, 0);
1266 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1267 t = TREE_OPERAND (t, 0);
1268 if (TREE_CODE (t) == INDIRECT_REF
1269 || TREE_CODE (t) == ADDR_EXPR)
1270 t = TREE_OPERAND (t, 0);
1271 if (is_omp_target (ctx->stmt))
1273 if (is_variable_sized (t))
1275 gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
1276 t = DECL_VALUE_EXPR (t);
1277 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
1278 t = TREE_OPERAND (t, 0);
1279 gcc_assert (DECL_P (t));
1281 tree at = t;
1282 if (ctx->outer)
1283 scan_omp_op (&at, ctx->outer);
1284 tree nt = omp_copy_decl_1 (at, ctx);
1285 splay_tree_insert (ctx->field_map,
1286 (splay_tree_key) &DECL_CONTEXT (t),
1287 (splay_tree_value) nt);
1288 if (at != t)
1289 splay_tree_insert (ctx->field_map,
1290 (splay_tree_key) &DECL_CONTEXT (at),
1291 (splay_tree_value) nt);
1292 break;
1294 install_var_local (t, ctx);
1295 if (is_taskreg_ctx (ctx)
1296 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1297 || (is_task_ctx (ctx)
1298 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1299 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1300 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1301 == POINTER_TYPE)))))
1302 && !is_variable_sized (t)
1303 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1304 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1305 && !is_task_ctx (ctx))))
1307 by_ref = use_pointer_for_field (t, NULL);
1308 if (is_task_ctx (ctx)
1309 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1310 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1312 install_var_field (t, false, 1, ctx);
1313 install_var_field (t, by_ref, 2, ctx);
1315 else
1316 install_var_field (t, by_ref, 3, ctx);
1318 break;
1320 if (is_omp_target (ctx->stmt))
1322 tree at = decl;
1323 if (ctx->outer)
1324 scan_omp_op (&at, ctx->outer);
1325 tree nt = omp_copy_decl_1 (at, ctx);
1326 splay_tree_insert (ctx->field_map,
1327 (splay_tree_key) &DECL_CONTEXT (decl),
1328 (splay_tree_value) nt);
1329 if (at != decl)
1330 splay_tree_insert (ctx->field_map,
1331 (splay_tree_key) &DECL_CONTEXT (at),
1332 (splay_tree_value) nt);
1333 break;
1335 if (is_task_ctx (ctx)
1336 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1337 && OMP_CLAUSE_REDUCTION_TASK (c)
1338 && is_parallel_ctx (ctx)))
1340 /* Global variables don't need to be copied,
1341 the receiver side will use them directly. */
1342 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1344 by_ref = use_pointer_for_field (decl, ctx);
1345 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1346 install_var_field (decl, by_ref, 3, ctx);
1348 install_var_local (decl, ctx);
1349 break;
1351 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1352 && OMP_CLAUSE_REDUCTION_TASK (c))
1354 install_var_local (decl, ctx);
1355 break;
1357 goto do_private;
1359 case OMP_CLAUSE_LASTPRIVATE:
1360 /* Let the corresponding firstprivate clause create
1361 the variable. */
1362 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1363 break;
1364 /* FALLTHRU */
1366 case OMP_CLAUSE_FIRSTPRIVATE:
1367 case OMP_CLAUSE_LINEAR:
1368 decl = OMP_CLAUSE_DECL (c);
1369 do_private:
1370 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1371 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1372 && is_gimple_omp_offloaded (ctx->stmt))
1374 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1376 by_ref = !omp_privatize_by_reference (decl);
1377 install_var_field (decl, by_ref, 3, ctx);
1379 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1380 install_var_field (decl, true, 3, ctx);
1381 else
1382 install_var_field (decl, false, 3, ctx);
1384 if (is_variable_sized (decl))
1386 if (is_task_ctx (ctx))
1388 if (ctx->allocate_map
1389 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1391 /* For now. */
1392 if (ctx->allocate_map->get (decl))
1393 ctx->allocate_map->remove (decl);
1395 install_var_field (decl, false, 1, ctx);
1397 break;
1399 else if (is_taskreg_ctx (ctx))
1401 bool global
1402 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1403 by_ref = use_pointer_for_field (decl, NULL);
1405 if (is_task_ctx (ctx)
1406 && (global || by_ref || omp_privatize_by_reference (decl)))
1408 if (ctx->allocate_map
1409 && ctx->allocate_map->get (decl))
1410 install_var_field (decl, by_ref, 32 | 1, ctx);
1411 else
1412 install_var_field (decl, false, 1, ctx);
1413 if (!global)
1414 install_var_field (decl, by_ref, 2, ctx);
1416 else if (!global)
1417 install_var_field (decl, by_ref, 3, ctx);
1419 install_var_local (decl, ctx);
1420 break;
1422 case OMP_CLAUSE_USE_DEVICE_PTR:
1423 case OMP_CLAUSE_USE_DEVICE_ADDR:
1424 decl = OMP_CLAUSE_DECL (c);
1426 /* Fortran array descriptors. */
1427 if (lang_hooks.decls.omp_array_data (decl, true))
1428 install_var_field (decl, false, 19, ctx);
1429 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1430 && !omp_privatize_by_reference (decl)
1431 && !omp_is_allocatable_or_ptr (decl))
1432 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1433 install_var_field (decl, true, 11, ctx);
1434 else
1435 install_var_field (decl, false, 11, ctx);
1436 if (DECL_SIZE (decl)
1437 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1439 tree decl2 = DECL_VALUE_EXPR (decl);
1440 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1441 decl2 = TREE_OPERAND (decl2, 0);
1442 gcc_assert (DECL_P (decl2));
1443 install_var_local (decl2, ctx);
1445 install_var_local (decl, ctx);
1446 break;
1448 case OMP_CLAUSE_IS_DEVICE_PTR:
1449 decl = OMP_CLAUSE_DECL (c);
1450 goto do_private;
1452 case OMP_CLAUSE__LOOPTEMP_:
1453 case OMP_CLAUSE__REDUCTEMP_:
1454 gcc_assert (is_taskreg_ctx (ctx));
1455 decl = OMP_CLAUSE_DECL (c);
1456 install_var_field (decl, false, 3, ctx);
1457 install_var_local (decl, ctx);
1458 break;
1460 case OMP_CLAUSE_COPYPRIVATE:
1461 case OMP_CLAUSE_COPYIN:
1462 decl = OMP_CLAUSE_DECL (c);
1463 by_ref = use_pointer_for_field (decl, NULL);
1464 install_var_field (decl, by_ref, 3, ctx);
1465 break;
1467 case OMP_CLAUSE_FINAL:
1468 case OMP_CLAUSE_IF:
1469 case OMP_CLAUSE_NUM_THREADS:
1470 case OMP_CLAUSE_NUM_TEAMS:
1471 case OMP_CLAUSE_THREAD_LIMIT:
1472 case OMP_CLAUSE_DEVICE:
1473 case OMP_CLAUSE_SCHEDULE:
1474 case OMP_CLAUSE_DIST_SCHEDULE:
1475 case OMP_CLAUSE_DEPEND:
1476 case OMP_CLAUSE_PRIORITY:
1477 case OMP_CLAUSE_GRAINSIZE:
1478 case OMP_CLAUSE_NUM_TASKS:
1479 case OMP_CLAUSE_NUM_GANGS:
1480 case OMP_CLAUSE_NUM_WORKERS:
1481 case OMP_CLAUSE_VECTOR_LENGTH:
1482 case OMP_CLAUSE_DETACH:
1483 case OMP_CLAUSE_FILTER:
1484 if (ctx->outer)
1485 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1486 break;
1488 case OMP_CLAUSE_TO:
1489 case OMP_CLAUSE_FROM:
1490 case OMP_CLAUSE_MAP:
1491 if (ctx->outer)
1492 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1493 decl = OMP_CLAUSE_DECL (c);
1494 /* Global variables with "omp declare target" attribute
1495 don't need to be copied, the receiver side will use them
1496 directly. However, global variables with "omp declare target link"
1497 attribute need to be copied. Or when ALWAYS modifier is used. */
1498 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1499 && DECL_P (decl)
1500 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1501 && (OMP_CLAUSE_MAP_KIND (c)
1502 != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
1503 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
1504 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
1505 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1506 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1507 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1508 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1509 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
1510 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1511 && varpool_node::get_create (decl)->offloadable
1512 && !lookup_attribute ("omp declare target link",
1513 DECL_ATTRIBUTES (decl)))
1514 break;
1515 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1516 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1518 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1519 not offloaded; there is nothing to map for those. */
1520 if (!is_gimple_omp_offloaded (ctx->stmt)
1521 && !POINTER_TYPE_P (TREE_TYPE (decl))
1522 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1523 break;
1525 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1526 && DECL_P (decl)
1527 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1528 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1529 && is_omp_target (ctx->stmt))
1531 /* If this is an offloaded region, an attach operation should
1532 only exist when the pointer variable is mapped in a prior
1533 clause. */
1534 if (is_gimple_omp_offloaded (ctx->stmt))
1535 gcc_assert
1536 (maybe_lookup_decl (decl, ctx)
1537 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1538 && lookup_attribute ("omp declare target",
1539 DECL_ATTRIBUTES (decl))));
1541 /* By itself, attach/detach is generated as part of pointer
1542 variable mapping and should not create new variables in the
1543 offloaded region, however sender refs for it must be created
1544 for its address to be passed to the runtime. */
1545 tree field
1546 = build_decl (OMP_CLAUSE_LOCATION (c),
1547 FIELD_DECL, NULL_TREE, ptr_type_node);
1548 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1549 insert_field_into_struct (ctx->record_type, field);
1550 /* To not clash with a map of the pointer variable itself,
1551 attach/detach maps have their field looked up by the *clause*
1552 tree expression, not the decl. */
1553 gcc_assert (!splay_tree_lookup (ctx->field_map,
1554 (splay_tree_key) c));
1555 splay_tree_insert (ctx->field_map, (splay_tree_key) c,
1556 (splay_tree_value) field);
1557 break;
1559 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1560 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1561 || (OMP_CLAUSE_MAP_KIND (c)
1562 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1564 if (TREE_CODE (decl) == COMPONENT_REF
1565 || (TREE_CODE (decl) == INDIRECT_REF
1566 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1567 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1568 == REFERENCE_TYPE)))
1569 break;
1570 if (DECL_SIZE (decl)
1571 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1573 tree decl2 = DECL_VALUE_EXPR (decl);
1574 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1575 decl2 = TREE_OPERAND (decl2, 0);
1576 gcc_assert (DECL_P (decl2));
1577 install_var_local (decl2, ctx);
1579 install_var_local (decl, ctx);
1580 break;
1582 if (DECL_P (decl))
1584 if (DECL_SIZE (decl)
1585 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1587 tree decl2 = DECL_VALUE_EXPR (decl);
1588 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1589 decl2 = TREE_OPERAND (decl2, 0);
1590 gcc_assert (DECL_P (decl2));
1591 install_var_field (decl2, true, 3, ctx);
1592 install_var_local (decl2, ctx);
1593 install_var_local (decl, ctx);
1595 else
1597 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1598 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1599 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1600 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1601 install_var_field (decl, true, 7, ctx);
1602 else
1603 install_var_field (decl, true, 3, ctx);
1604 if (is_gimple_omp_offloaded (ctx->stmt)
1605 && !(is_gimple_omp_oacc (ctx->stmt)
1606 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
1607 install_var_local (decl, ctx);
1610 else
1612 tree base = get_base_address (decl);
1613 tree nc = OMP_CLAUSE_CHAIN (c);
1614 if (DECL_P (base)
1615 && nc != NULL_TREE
1616 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1617 && OMP_CLAUSE_DECL (nc) == base
1618 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1619 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1621 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1622 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1624 else
1626 if (ctx->outer)
1628 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1629 decl = OMP_CLAUSE_DECL (c);
1631 gcc_assert (!splay_tree_lookup (ctx->field_map,
1632 (splay_tree_key) decl));
1633 tree field
1634 = build_decl (OMP_CLAUSE_LOCATION (c),
1635 FIELD_DECL, NULL_TREE, ptr_type_node);
1636 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1637 insert_field_into_struct (ctx->record_type, field);
1638 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1639 (splay_tree_value) field);
1642 break;
1644 case OMP_CLAUSE_ORDER:
1645 ctx->order_concurrent = true;
1646 break;
1648 case OMP_CLAUSE_BIND:
1649 ctx->loop_p = true;
1650 break;
1652 case OMP_CLAUSE_NOWAIT:
1653 case OMP_CLAUSE_ORDERED:
1654 case OMP_CLAUSE_COLLAPSE:
1655 case OMP_CLAUSE_UNTIED:
1656 case OMP_CLAUSE_MERGEABLE:
1657 case OMP_CLAUSE_PROC_BIND:
1658 case OMP_CLAUSE_SAFELEN:
1659 case OMP_CLAUSE_SIMDLEN:
1660 case OMP_CLAUSE_THREADS:
1661 case OMP_CLAUSE_SIMD:
1662 case OMP_CLAUSE_NOGROUP:
1663 case OMP_CLAUSE_DEFAULTMAP:
1664 case OMP_CLAUSE_ASYNC:
1665 case OMP_CLAUSE_WAIT:
1666 case OMP_CLAUSE_GANG:
1667 case OMP_CLAUSE_WORKER:
1668 case OMP_CLAUSE_VECTOR:
1669 case OMP_CLAUSE_INDEPENDENT:
1670 case OMP_CLAUSE_AUTO:
1671 case OMP_CLAUSE_SEQ:
1672 case OMP_CLAUSE_TILE:
1673 case OMP_CLAUSE__SIMT_:
1674 case OMP_CLAUSE_DEFAULT:
1675 case OMP_CLAUSE_NONTEMPORAL:
1676 case OMP_CLAUSE_IF_PRESENT:
1677 case OMP_CLAUSE_FINALIZE:
1678 case OMP_CLAUSE_TASK_REDUCTION:
1679 case OMP_CLAUSE_ALLOCATE:
1680 break;
1682 case OMP_CLAUSE_ALIGNED:
1683 decl = OMP_CLAUSE_DECL (c);
1684 if (is_global_var (decl)
1685 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1686 install_var_local (decl, ctx);
1687 break;
1689 case OMP_CLAUSE__CONDTEMP_:
1690 decl = OMP_CLAUSE_DECL (c);
1691 if (is_parallel_ctx (ctx))
1693 install_var_field (decl, false, 3, ctx);
1694 install_var_local (decl, ctx);
1696 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1697 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1698 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1699 install_var_local (decl, ctx);
1700 break;
1702 case OMP_CLAUSE__CACHE_:
1703 case OMP_CLAUSE_NOHOST:
1704 default:
1705 gcc_unreachable ();
1709 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1711 switch (OMP_CLAUSE_CODE (c))
1713 case OMP_CLAUSE_LASTPRIVATE:
1714 /* Let the corresponding firstprivate clause create
1715 the variable. */
1716 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1717 scan_array_reductions = true;
1718 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1719 break;
1720 /* FALLTHRU */
1722 case OMP_CLAUSE_FIRSTPRIVATE:
1723 case OMP_CLAUSE_PRIVATE:
1724 case OMP_CLAUSE_LINEAR:
1725 case OMP_CLAUSE_IS_DEVICE_PTR:
1726 decl = OMP_CLAUSE_DECL (c);
1727 if (is_variable_sized (decl))
1729 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1730 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1731 && is_gimple_omp_offloaded (ctx->stmt))
1733 tree decl2 = DECL_VALUE_EXPR (decl);
1734 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1735 decl2 = TREE_OPERAND (decl2, 0);
1736 gcc_assert (DECL_P (decl2));
1737 install_var_local (decl2, ctx);
1738 fixup_remapped_decl (decl2, ctx, false);
1740 install_var_local (decl, ctx);
1742 fixup_remapped_decl (decl, ctx,
1743 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1744 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1745 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1746 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1747 scan_array_reductions = true;
1748 break;
1750 case OMP_CLAUSE_REDUCTION:
1751 case OMP_CLAUSE_IN_REDUCTION:
1752 decl = OMP_CLAUSE_DECL (c);
1753 if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
1755 if (is_variable_sized (decl))
1756 install_var_local (decl, ctx);
1757 fixup_remapped_decl (decl, ctx, false);
1759 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1760 scan_array_reductions = true;
1761 break;
1763 case OMP_CLAUSE_TASK_REDUCTION:
1764 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1765 scan_array_reductions = true;
1766 break;
1768 case OMP_CLAUSE_SHARED:
1769 /* Ignore shared directives in teams construct inside of
1770 target construct. */
1771 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1772 && !is_host_teams_ctx (ctx))
1773 break;
1774 decl = OMP_CLAUSE_DECL (c);
1775 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1776 break;
1777 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1779 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1780 ctx->outer)))
1781 break;
1782 bool by_ref = use_pointer_for_field (decl, ctx);
1783 install_var_field (decl, by_ref, 11, ctx);
1784 break;
1786 fixup_remapped_decl (decl, ctx, false);
1787 break;
1789 case OMP_CLAUSE_MAP:
1790 if (!is_gimple_omp_offloaded (ctx->stmt))
1791 break;
1792 decl = OMP_CLAUSE_DECL (c);
1793 if (DECL_P (decl)
1794 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1795 && (OMP_CLAUSE_MAP_KIND (c)
1796 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1797 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1798 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1799 && varpool_node::get_create (decl)->offloadable)
1800 break;
1801 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1802 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1803 && is_omp_target (ctx->stmt)
1804 && !is_gimple_omp_offloaded (ctx->stmt))
1805 break;
1806 if (DECL_P (decl))
1808 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1809 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1810 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1811 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1813 tree new_decl = lookup_decl (decl, ctx);
1814 TREE_TYPE (new_decl)
1815 = remap_type (TREE_TYPE (decl), &ctx->cb);
1817 else if (DECL_SIZE (decl)
1818 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1820 tree decl2 = DECL_VALUE_EXPR (decl);
1821 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1822 decl2 = TREE_OPERAND (decl2, 0);
1823 gcc_assert (DECL_P (decl2));
1824 fixup_remapped_decl (decl2, ctx, false);
1825 fixup_remapped_decl (decl, ctx, true);
1827 else
1828 fixup_remapped_decl (decl, ctx, false);
1830 break;
1832 case OMP_CLAUSE_COPYPRIVATE:
1833 case OMP_CLAUSE_COPYIN:
1834 case OMP_CLAUSE_DEFAULT:
1835 case OMP_CLAUSE_IF:
1836 case OMP_CLAUSE_NUM_THREADS:
1837 case OMP_CLAUSE_NUM_TEAMS:
1838 case OMP_CLAUSE_THREAD_LIMIT:
1839 case OMP_CLAUSE_DEVICE:
1840 case OMP_CLAUSE_SCHEDULE:
1841 case OMP_CLAUSE_DIST_SCHEDULE:
1842 case OMP_CLAUSE_NOWAIT:
1843 case OMP_CLAUSE_ORDERED:
1844 case OMP_CLAUSE_COLLAPSE:
1845 case OMP_CLAUSE_UNTIED:
1846 case OMP_CLAUSE_FINAL:
1847 case OMP_CLAUSE_MERGEABLE:
1848 case OMP_CLAUSE_PROC_BIND:
1849 case OMP_CLAUSE_SAFELEN:
1850 case OMP_CLAUSE_SIMDLEN:
1851 case OMP_CLAUSE_ALIGNED:
1852 case OMP_CLAUSE_DEPEND:
1853 case OMP_CLAUSE_DETACH:
1854 case OMP_CLAUSE_ALLOCATE:
1855 case OMP_CLAUSE__LOOPTEMP_:
1856 case OMP_CLAUSE__REDUCTEMP_:
1857 case OMP_CLAUSE_TO:
1858 case OMP_CLAUSE_FROM:
1859 case OMP_CLAUSE_PRIORITY:
1860 case OMP_CLAUSE_GRAINSIZE:
1861 case OMP_CLAUSE_NUM_TASKS:
1862 case OMP_CLAUSE_THREADS:
1863 case OMP_CLAUSE_SIMD:
1864 case OMP_CLAUSE_NOGROUP:
1865 case OMP_CLAUSE_DEFAULTMAP:
1866 case OMP_CLAUSE_ORDER:
1867 case OMP_CLAUSE_BIND:
1868 case OMP_CLAUSE_USE_DEVICE_PTR:
1869 case OMP_CLAUSE_USE_DEVICE_ADDR:
1870 case OMP_CLAUSE_NONTEMPORAL:
1871 case OMP_CLAUSE_ASYNC:
1872 case OMP_CLAUSE_WAIT:
1873 case OMP_CLAUSE_NUM_GANGS:
1874 case OMP_CLAUSE_NUM_WORKERS:
1875 case OMP_CLAUSE_VECTOR_LENGTH:
1876 case OMP_CLAUSE_GANG:
1877 case OMP_CLAUSE_WORKER:
1878 case OMP_CLAUSE_VECTOR:
1879 case OMP_CLAUSE_INDEPENDENT:
1880 case OMP_CLAUSE_AUTO:
1881 case OMP_CLAUSE_SEQ:
1882 case OMP_CLAUSE_TILE:
1883 case OMP_CLAUSE__SIMT_:
1884 case OMP_CLAUSE_IF_PRESENT:
1885 case OMP_CLAUSE_FINALIZE:
1886 case OMP_CLAUSE_FILTER:
1887 case OMP_CLAUSE__CONDTEMP_:
1888 break;
1890 case OMP_CLAUSE__CACHE_:
1891 case OMP_CLAUSE_NOHOST:
1892 default:
1893 gcc_unreachable ();
1897 gcc_checking_assert (!scan_array_reductions
1898 || !is_gimple_omp_oacc (ctx->stmt));
1899 if (scan_array_reductions)
1901 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1902 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1903 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1904 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1905 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1907 omp_context *rctx = ctx;
1908 if (is_omp_target (ctx->stmt))
1909 rctx = ctx->outer;
1910 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
1911 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
1913 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1914 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1915 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1916 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1917 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1918 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1922 /* Create a new name for omp child function. Returns an identifier. */
1924 static tree
1925 create_omp_child_function_name (bool task_copy)
1927 return clone_function_name_numbered (current_function_decl,
1928 task_copy ? "_omp_cpyfn" : "_omp_fn");
1931 /* Return true if CTX may belong to offloaded code: either if current function
1932 is offloaded, or any enclosing context corresponds to a target region. */
1934 static bool
1935 omp_maybe_offloaded_ctx (omp_context *ctx)
1937 if (cgraph_node::get (current_function_decl)->offloadable)
1938 return true;
1939 for (; ctx; ctx = ctx->outer)
1940 if (is_gimple_omp_offloaded (ctx->stmt))
1941 return true;
1942 return false;
1945 /* Build a decl for the omp child function. It'll not contain a body
1946 yet, just the bare decl. */
1948 static void
1949 create_omp_child_function (omp_context *ctx, bool task_copy)
1951 tree decl, type, name, t;
1953 name = create_omp_child_function_name (task_copy);
1954 if (task_copy)
1955 type = build_function_type_list (void_type_node, ptr_type_node,
1956 ptr_type_node, NULL_TREE);
1957 else
1958 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1960 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1962 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1963 || !task_copy);
1964 if (!task_copy)
1965 ctx->cb.dst_fn = decl;
1966 else
1967 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1969 TREE_STATIC (decl) = 1;
1970 TREE_USED (decl) = 1;
1971 DECL_ARTIFICIAL (decl) = 1;
1972 DECL_IGNORED_P (decl) = 0;
1973 TREE_PUBLIC (decl) = 0;
1974 DECL_UNINLINABLE (decl) = 1;
1975 DECL_EXTERNAL (decl) = 0;
1976 DECL_CONTEXT (decl) = NULL_TREE;
1977 DECL_INITIAL (decl) = make_node (BLOCK);
1978 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1979 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1980 /* Remove omp declare simd attribute from the new attributes. */
1981 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1983 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1984 a = a2;
1985 a = TREE_CHAIN (a);
1986 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1987 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1988 *p = TREE_CHAIN (*p);
1989 else
1991 tree chain = TREE_CHAIN (*p);
1992 *p = copy_node (*p);
1993 p = &TREE_CHAIN (*p);
1994 *p = chain;
1997 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1998 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1999 DECL_FUNCTION_SPECIFIC_TARGET (decl)
2000 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
2001 DECL_FUNCTION_VERSIONED (decl)
2002 = DECL_FUNCTION_VERSIONED (current_function_decl);
2004 if (omp_maybe_offloaded_ctx (ctx))
2006 cgraph_node::get_create (decl)->offloadable = 1;
2007 if (ENABLE_OFFLOADING)
2008 g->have_offload = true;
2011 if (cgraph_node::get_create (decl)->offloadable)
2013 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
2014 ? "omp target entrypoint"
2015 : "omp declare target");
2016 if (lookup_attribute ("omp declare target",
2017 DECL_ATTRIBUTES (current_function_decl)))
2019 if (is_gimple_omp_offloaded (ctx->stmt))
2020 DECL_ATTRIBUTES (decl)
2021 = remove_attribute ("omp declare target",
2022 copy_list (DECL_ATTRIBUTES (decl)));
2023 else
2024 target_attr = NULL;
2026 if (target_attr)
2027 DECL_ATTRIBUTES (decl)
2028 = tree_cons (get_identifier (target_attr),
2029 NULL_TREE, DECL_ATTRIBUTES (decl));
2032 t = build_decl (DECL_SOURCE_LOCATION (decl),
2033 RESULT_DECL, NULL_TREE, void_type_node);
2034 DECL_ARTIFICIAL (t) = 1;
2035 DECL_IGNORED_P (t) = 1;
2036 DECL_CONTEXT (t) = decl;
2037 DECL_RESULT (decl) = t;
2039 tree data_name = get_identifier (".omp_data_i");
2040 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
2041 ptr_type_node);
2042 DECL_ARTIFICIAL (t) = 1;
2043 DECL_NAMELESS (t) = 1;
2044 DECL_ARG_TYPE (t) = ptr_type_node;
2045 DECL_CONTEXT (t) = current_function_decl;
2046 TREE_USED (t) = 1;
2047 TREE_READONLY (t) = 1;
2048 DECL_ARGUMENTS (decl) = t;
2049 if (!task_copy)
2050 ctx->receiver_decl = t;
2051 else
2053 t = build_decl (DECL_SOURCE_LOCATION (decl),
2054 PARM_DECL, get_identifier (".omp_data_o"),
2055 ptr_type_node);
2056 DECL_ARTIFICIAL (t) = 1;
2057 DECL_NAMELESS (t) = 1;
2058 DECL_ARG_TYPE (t) = ptr_type_node;
2059 DECL_CONTEXT (t) = current_function_decl;
2060 TREE_USED (t) = 1;
2061 TREE_ADDRESSABLE (t) = 1;
2062 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2063 DECL_ARGUMENTS (decl) = t;
2066 /* Allocate memory for the function structure. The call to
2067 allocate_struct_function clobbers CFUN, so we need to restore
2068 it afterward. */
2069 push_struct_function (decl);
2070 cfun->function_end_locus = gimple_location (ctx->stmt);
2071 init_tree_ssa (cfun);
2072 pop_cfun ();
2075 /* Callback for walk_gimple_seq. Check if combined parallel
2076 contains gimple_omp_for_combined_into_p OMP_FOR. */
2078 tree
2079 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2080 bool *handled_ops_p,
2081 struct walk_stmt_info *wi)
2083 gimple *stmt = gsi_stmt (*gsi_p);
2085 *handled_ops_p = true;
2086 switch (gimple_code (stmt))
2088 WALK_SUBSTMTS;
2090 case GIMPLE_OMP_FOR:
2091 if (gimple_omp_for_combined_into_p (stmt)
2092 && gimple_omp_for_kind (stmt)
2093 == *(const enum gf_mask *) (wi->info))
2095 wi->info = stmt;
2096 return integer_zero_node;
2098 break;
2099 default:
2100 break;
2102 return NULL;
2105 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2107 static void
2108 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2109 omp_context *outer_ctx)
2111 struct walk_stmt_info wi;
2113 memset (&wi, 0, sizeof (wi));
2114 wi.val_only = true;
2115 wi.info = (void *) &msk;
2116 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2117 if (wi.info != (void *) &msk)
2119 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2120 struct omp_for_data fd;
2121 omp_extract_for_data (for_stmt, &fd, NULL);
2122 /* We need two temporaries with fd.loop.v type (istart/iend)
2123 and then (fd.collapse - 1) temporaries with the same
2124 type for count2 ... countN-1 vars if not constant. */
2125 size_t count = 2, i;
2126 tree type = fd.iter_type;
2127 if (fd.collapse > 1
2128 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2130 count += fd.collapse - 1;
2131 /* If there are lastprivate clauses on the inner
2132 GIMPLE_OMP_FOR, add one more temporaries for the total number
2133 of iterations (product of count1 ... countN-1). */
2134 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2135 OMP_CLAUSE_LASTPRIVATE)
2136 || (msk == GF_OMP_FOR_KIND_FOR
2137 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2138 OMP_CLAUSE_LASTPRIVATE)))
2140 tree temp = create_tmp_var (type);
2141 tree c = build_omp_clause (UNKNOWN_LOCATION,
2142 OMP_CLAUSE__LOOPTEMP_);
2143 insert_decl_map (&outer_ctx->cb, temp, temp);
2144 OMP_CLAUSE_DECL (c) = temp;
2145 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2146 gimple_omp_taskreg_set_clauses (stmt, c);
2148 if (fd.non_rect
2149 && fd.last_nonrect == fd.first_nonrect + 1)
2150 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2151 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2153 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2154 tree type2 = TREE_TYPE (v);
2155 count++;
2156 for (i = 0; i < 3; i++)
2158 tree temp = create_tmp_var (type2);
2159 tree c = build_omp_clause (UNKNOWN_LOCATION,
2160 OMP_CLAUSE__LOOPTEMP_);
2161 insert_decl_map (&outer_ctx->cb, temp, temp);
2162 OMP_CLAUSE_DECL (c) = temp;
2163 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2164 gimple_omp_taskreg_set_clauses (stmt, c);
2168 for (i = 0; i < count; i++)
2170 tree temp = create_tmp_var (type);
2171 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2172 insert_decl_map (&outer_ctx->cb, temp, temp);
2173 OMP_CLAUSE_DECL (c) = temp;
2174 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2175 gimple_omp_taskreg_set_clauses (stmt, c);
2178 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2179 && omp_find_clause (gimple_omp_task_clauses (stmt),
2180 OMP_CLAUSE_REDUCTION))
2182 tree type = build_pointer_type (pointer_sized_int_node);
2183 tree temp = create_tmp_var (type);
2184 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2185 insert_decl_map (&outer_ctx->cb, temp, temp);
2186 OMP_CLAUSE_DECL (c) = temp;
2187 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2188 gimple_omp_task_set_clauses (stmt, c);
2192 /* Scan an OpenMP parallel directive. */
2194 static void
2195 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2197 omp_context *ctx;
2198 tree name;
2199 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2201 /* Ignore parallel directives with empty bodies, unless there
2202 are copyin clauses. */
2203 if (optimize > 0
2204 && empty_body_p (gimple_omp_body (stmt))
2205 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2206 OMP_CLAUSE_COPYIN) == NULL)
2208 gsi_replace (gsi, gimple_build_nop (), false);
2209 return;
2212 if (gimple_omp_parallel_combined_p (stmt))
2213 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
2214 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2215 OMP_CLAUSE_REDUCTION);
2216 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2217 if (OMP_CLAUSE_REDUCTION_TASK (c))
2219 tree type = build_pointer_type (pointer_sized_int_node);
2220 tree temp = create_tmp_var (type);
2221 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2222 if (outer_ctx)
2223 insert_decl_map (&outer_ctx->cb, temp, temp);
2224 OMP_CLAUSE_DECL (c) = temp;
2225 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2226 gimple_omp_parallel_set_clauses (stmt, c);
2227 break;
2229 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2230 break;
2232 ctx = new_omp_context (stmt, outer_ctx);
2233 taskreg_contexts.safe_push (ctx);
2234 if (taskreg_nesting_level > 1)
2235 ctx->is_nested = true;
2236 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2237 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2238 name = create_tmp_var_name (".omp_data_s");
2239 name = build_decl (gimple_location (stmt),
2240 TYPE_DECL, name, ctx->record_type);
2241 DECL_ARTIFICIAL (name) = 1;
2242 DECL_NAMELESS (name) = 1;
2243 TYPE_NAME (ctx->record_type) = name;
2244 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2245 create_omp_child_function (ctx, false);
2246 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2248 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2249 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2251 if (TYPE_FIELDS (ctx->record_type) == NULL)
2252 ctx->record_type = ctx->receiver_decl = NULL;
2255 /* Scan an OpenMP task directive. */
2257 static void
2258 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2260 omp_context *ctx;
2261 tree name, t;
2262 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2264 /* Ignore task directives with empty bodies, unless they have depend
2265 clause. */
2266 if (optimize > 0
2267 && gimple_omp_body (stmt)
2268 && empty_body_p (gimple_omp_body (stmt))
2269 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2271 gsi_replace (gsi, gimple_build_nop (), false);
2272 return;
2275 if (gimple_omp_task_taskloop_p (stmt))
2276 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2278 ctx = new_omp_context (stmt, outer_ctx);
2280 if (gimple_omp_task_taskwait_p (stmt))
2282 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2283 return;
2286 taskreg_contexts.safe_push (ctx);
2287 if (taskreg_nesting_level > 1)
2288 ctx->is_nested = true;
2289 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2290 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2291 name = create_tmp_var_name (".omp_data_s");
2292 name = build_decl (gimple_location (stmt),
2293 TYPE_DECL, name, ctx->record_type);
2294 DECL_ARTIFICIAL (name) = 1;
2295 DECL_NAMELESS (name) = 1;
2296 TYPE_NAME (ctx->record_type) = name;
2297 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2298 create_omp_child_function (ctx, false);
2299 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2301 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2303 if (ctx->srecord_type)
2305 name = create_tmp_var_name (".omp_data_a");
2306 name = build_decl (gimple_location (stmt),
2307 TYPE_DECL, name, ctx->srecord_type);
2308 DECL_ARTIFICIAL (name) = 1;
2309 DECL_NAMELESS (name) = 1;
2310 TYPE_NAME (ctx->srecord_type) = name;
2311 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2312 create_omp_child_function (ctx, true);
2315 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2317 if (TYPE_FIELDS (ctx->record_type) == NULL)
2319 ctx->record_type = ctx->receiver_decl = NULL;
2320 t = build_int_cst (long_integer_type_node, 0);
2321 gimple_omp_task_set_arg_size (stmt, t);
2322 t = build_int_cst (long_integer_type_node, 1);
2323 gimple_omp_task_set_arg_align (stmt, t);
2327 /* Helper function for finish_taskreg_scan, called through walk_tree.
2328 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2329 tree, replace it in the expression. */
2331 static tree
2332 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2334 if (VAR_P (*tp))
2336 omp_context *ctx = (omp_context *) data;
2337 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2338 if (t != *tp)
2340 if (DECL_HAS_VALUE_EXPR_P (t))
2341 t = unshare_expr (DECL_VALUE_EXPR (t));
2342 *tp = t;
2344 *walk_subtrees = 0;
2346 else if (IS_TYPE_OR_DECL_P (*tp))
2347 *walk_subtrees = 0;
2348 return NULL_TREE;
2351 /* If any decls have been made addressable during scan_omp,
2352 adjust their fields if needed, and layout record types
2353 of parallel/task constructs. */
2355 static void
2356 finish_taskreg_scan (omp_context *ctx)
2358 if (ctx->record_type == NULL_TREE)
2359 return;
2361 /* If any task_shared_vars were needed, verify all
2362 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2363 statements if use_pointer_for_field hasn't changed
2364 because of that. If it did, update field types now. */
2365 if (task_shared_vars)
2367 tree c;
2369 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2370 c; c = OMP_CLAUSE_CHAIN (c))
2371 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2372 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2374 tree decl = OMP_CLAUSE_DECL (c);
2376 /* Global variables don't need to be copied,
2377 the receiver side will use them directly. */
2378 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2379 continue;
2380 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2381 || !use_pointer_for_field (decl, ctx))
2382 continue;
2383 tree field = lookup_field (decl, ctx);
2384 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2385 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2386 continue;
2387 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2388 TREE_THIS_VOLATILE (field) = 0;
2389 DECL_USER_ALIGN (field) = 0;
2390 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2391 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2392 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2393 if (ctx->srecord_type)
2395 tree sfield = lookup_sfield (decl, ctx);
2396 TREE_TYPE (sfield) = TREE_TYPE (field);
2397 TREE_THIS_VOLATILE (sfield) = 0;
2398 DECL_USER_ALIGN (sfield) = 0;
2399 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2400 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2401 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2406 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2408 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2409 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2410 if (c)
2412 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2413 expects to find it at the start of data. */
2414 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2415 tree *p = &TYPE_FIELDS (ctx->record_type);
2416 while (*p)
2417 if (*p == f)
2419 *p = DECL_CHAIN (*p);
2420 break;
2422 else
2423 p = &DECL_CHAIN (*p);
2424 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2425 TYPE_FIELDS (ctx->record_type) = f;
2427 layout_type (ctx->record_type);
2428 fixup_child_record_type (ctx);
2430 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2432 layout_type (ctx->record_type);
2433 fixup_child_record_type (ctx);
2435 else
2437 location_t loc = gimple_location (ctx->stmt);
2438 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2439 tree detach_clause
2440 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2441 OMP_CLAUSE_DETACH);
2442 /* Move VLA fields to the end. */
2443 p = &TYPE_FIELDS (ctx->record_type);
2444 while (*p)
2445 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2446 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2448 *q = *p;
2449 *p = TREE_CHAIN (*p);
2450 TREE_CHAIN (*q) = NULL_TREE;
2451 q = &TREE_CHAIN (*q);
2453 else
2454 p = &DECL_CHAIN (*p);
2455 *p = vla_fields;
2456 if (gimple_omp_task_taskloop_p (ctx->stmt))
2458 /* Move fields corresponding to first and second _looptemp_
2459 clause first. There are filled by GOMP_taskloop
2460 and thus need to be in specific positions. */
2461 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2462 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2463 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2464 OMP_CLAUSE__LOOPTEMP_);
2465 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2466 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2467 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2468 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2469 p = &TYPE_FIELDS (ctx->record_type);
2470 while (*p)
2471 if (*p == f1 || *p == f2 || *p == f3)
2472 *p = DECL_CHAIN (*p);
2473 else
2474 p = &DECL_CHAIN (*p);
2475 DECL_CHAIN (f1) = f2;
2476 if (c3)
2478 DECL_CHAIN (f2) = f3;
2479 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2481 else
2482 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2483 TYPE_FIELDS (ctx->record_type) = f1;
2484 if (ctx->srecord_type)
2486 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2487 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2488 if (c3)
2489 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2490 p = &TYPE_FIELDS (ctx->srecord_type);
2491 while (*p)
2492 if (*p == f1 || *p == f2 || *p == f3)
2493 *p = DECL_CHAIN (*p);
2494 else
2495 p = &DECL_CHAIN (*p);
2496 DECL_CHAIN (f1) = f2;
2497 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2498 if (c3)
2500 DECL_CHAIN (f2) = f3;
2501 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2503 else
2504 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2505 TYPE_FIELDS (ctx->srecord_type) = f1;
2508 if (detach_clause)
2510 tree c, field;
2512 /* Look for a firstprivate clause with the detach event handle. */
2513 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2514 c; c = OMP_CLAUSE_CHAIN (c))
2516 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2517 continue;
2518 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2519 == OMP_CLAUSE_DECL (detach_clause))
2520 break;
2523 gcc_assert (c);
2524 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2526 /* Move field corresponding to the detach clause first.
2527 This is filled by GOMP_task and needs to be in a
2528 specific position. */
2529 p = &TYPE_FIELDS (ctx->record_type);
2530 while (*p)
2531 if (*p == field)
2532 *p = DECL_CHAIN (*p);
2533 else
2534 p = &DECL_CHAIN (*p);
2535 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2536 TYPE_FIELDS (ctx->record_type) = field;
2537 if (ctx->srecord_type)
2539 field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2540 p = &TYPE_FIELDS (ctx->srecord_type);
2541 while (*p)
2542 if (*p == field)
2543 *p = DECL_CHAIN (*p);
2544 else
2545 p = &DECL_CHAIN (*p);
2546 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2547 TYPE_FIELDS (ctx->srecord_type) = field;
2550 layout_type (ctx->record_type);
2551 fixup_child_record_type (ctx);
2552 if (ctx->srecord_type)
2553 layout_type (ctx->srecord_type);
2554 tree t = fold_convert_loc (loc, long_integer_type_node,
2555 TYPE_SIZE_UNIT (ctx->record_type));
2556 if (TREE_CODE (t) != INTEGER_CST)
2558 t = unshare_expr (t);
2559 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2561 gimple_omp_task_set_arg_size (ctx->stmt, t);
2562 t = build_int_cst (long_integer_type_node,
2563 TYPE_ALIGN_UNIT (ctx->record_type));
2564 gimple_omp_task_set_arg_align (ctx->stmt, t);
2568 /* Find the enclosing offload context. */
2570 static omp_context *
2571 enclosing_target_ctx (omp_context *ctx)
2573 for (; ctx; ctx = ctx->outer)
2574 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2575 break;
2577 return ctx;
2580 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2581 construct.
2582 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2584 static bool
2585 ctx_in_oacc_kernels_region (omp_context *ctx)
2587 for (;ctx != NULL; ctx = ctx->outer)
2589 gimple *stmt = ctx->stmt;
2590 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2591 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2592 return true;
2595 return false;
2598 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2599 (This doesn't include OpenACC 'kernels' decomposed parts.)
2600 Until kernels handling moves to use the same loop indirection
2601 scheme as parallel, we need to do this checking early. */
2603 static unsigned
2604 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2606 bool checking = true;
2607 unsigned outer_mask = 0;
2608 unsigned this_mask = 0;
2609 bool has_seq = false, has_auto = false;
2611 if (ctx->outer)
2612 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2613 if (!stmt)
2615 checking = false;
2616 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2617 return outer_mask;
2618 stmt = as_a <gomp_for *> (ctx->stmt);
2621 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2623 switch (OMP_CLAUSE_CODE (c))
2625 case OMP_CLAUSE_GANG:
2626 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2627 break;
2628 case OMP_CLAUSE_WORKER:
2629 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2630 break;
2631 case OMP_CLAUSE_VECTOR:
2632 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2633 break;
2634 case OMP_CLAUSE_SEQ:
2635 has_seq = true;
2636 break;
2637 case OMP_CLAUSE_AUTO:
2638 has_auto = true;
2639 break;
2640 default:
2641 break;
2645 if (checking)
2647 if (has_seq && (this_mask || has_auto))
2648 error_at (gimple_location (stmt), "%<seq%> overrides other"
2649 " OpenACC loop specifiers");
2650 else if (has_auto && this_mask)
2651 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2652 " OpenACC loop specifiers");
2654 if (this_mask & outer_mask)
2655 error_at (gimple_location (stmt), "inner loop uses same"
2656 " OpenACC parallelism as containing loop");
2659 return outer_mask | this_mask;
2662 /* Scan a GIMPLE_OMP_FOR. */
2664 static omp_context *
2665 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2667 omp_context *ctx;
2668 size_t i;
2669 tree clauses = gimple_omp_for_clauses (stmt);
2671 ctx = new_omp_context (stmt, outer_ctx);
2673 if (is_gimple_omp_oacc (stmt))
2675 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2677 if (!(tgt && is_oacc_kernels (tgt)))
2678 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2680 tree c_op0;
2681 switch (OMP_CLAUSE_CODE (c))
2683 case OMP_CLAUSE_GANG:
2684 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2685 break;
2687 case OMP_CLAUSE_WORKER:
2688 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2689 break;
2691 case OMP_CLAUSE_VECTOR:
2692 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2693 break;
2695 default:
2696 continue;
2699 if (c_op0)
2701 /* By construction, this is impossible for OpenACC 'kernels'
2702 decomposed parts. */
2703 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2705 error_at (OMP_CLAUSE_LOCATION (c),
2706 "argument not permitted on %qs clause",
2707 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2708 if (tgt)
2709 inform (gimple_location (tgt->stmt),
2710 "enclosing parent compute construct");
2711 else if (oacc_get_fn_attrib (current_function_decl))
2712 inform (DECL_SOURCE_LOCATION (current_function_decl),
2713 "enclosing routine");
2714 else
2715 gcc_unreachable ();
2719 if (tgt && is_oacc_kernels (tgt))
2720 check_oacc_kernel_gwv (stmt, ctx);
2722 /* Collect all variables named in reductions on this loop. Ensure
2723 that, if this loop has a reduction on some variable v, and there is
2724 a reduction on v somewhere in an outer context, then there is a
2725 reduction on v on all intervening loops as well. */
2726 tree local_reduction_clauses = NULL;
2727 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2729 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2730 local_reduction_clauses
2731 = tree_cons (NULL, c, local_reduction_clauses);
2733 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2734 ctx->outer_reduction_clauses
2735 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2736 ctx->outer->outer_reduction_clauses);
2737 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2738 tree local_iter = local_reduction_clauses;
2739 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2741 tree local_clause = TREE_VALUE (local_iter);
2742 tree local_var = OMP_CLAUSE_DECL (local_clause);
2743 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2744 bool have_outer_reduction = false;
2745 tree ctx_iter = outer_reduction_clauses;
2746 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2748 tree outer_clause = TREE_VALUE (ctx_iter);
2749 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2750 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2751 if (outer_var == local_var && outer_op != local_op)
2753 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2754 "conflicting reduction operations for %qE",
2755 local_var);
2756 inform (OMP_CLAUSE_LOCATION (outer_clause),
2757 "location of the previous reduction for %qE",
2758 outer_var);
2760 if (outer_var == local_var)
2762 have_outer_reduction = true;
2763 break;
2766 if (have_outer_reduction)
2768 /* There is a reduction on outer_var both on this loop and on
2769 some enclosing loop. Walk up the context tree until such a
2770 loop with a reduction on outer_var is found, and complain
2771 about all intervening loops that do not have such a
2772 reduction. */
2773 struct omp_context *curr_loop = ctx->outer;
2774 bool found = false;
2775 while (curr_loop != NULL)
2777 tree curr_iter = curr_loop->local_reduction_clauses;
2778 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2780 tree curr_clause = TREE_VALUE (curr_iter);
2781 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2782 if (curr_var == local_var)
2784 found = true;
2785 break;
2788 if (!found)
2789 warning_at (gimple_location (curr_loop->stmt), 0,
2790 "nested loop in reduction needs "
2791 "reduction clause for %qE",
2792 local_var);
2793 else
2794 break;
2795 curr_loop = curr_loop->outer;
2799 ctx->local_reduction_clauses = local_reduction_clauses;
2800 ctx->outer_reduction_clauses
2801 = chainon (unshare_expr (ctx->local_reduction_clauses),
2802 ctx->outer_reduction_clauses);
2804 if (tgt && is_oacc_kernels (tgt))
2806 /* Strip out reductions, as they are not handled yet. */
2807 tree *prev_ptr = &clauses;
2809 while (tree probe = *prev_ptr)
2811 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2813 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2814 *prev_ptr = *next_ptr;
2815 else
2816 prev_ptr = next_ptr;
2819 gimple_omp_for_set_clauses (stmt, clauses);
2823 scan_sharing_clauses (clauses, ctx);
2825 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2826 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2828 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2829 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2830 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2831 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2833 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2834 return ctx;
2837 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2839 static void
2840 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2841 omp_context *outer_ctx)
2843 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2844 gsi_replace (gsi, bind, false);
2845 gimple_seq seq = NULL;
2846 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2847 tree cond = create_tmp_var_raw (integer_type_node);
2848 DECL_CONTEXT (cond) = current_function_decl;
2849 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2850 gimple_bind_set_vars (bind, cond);
2851 gimple_call_set_lhs (g, cond);
2852 gimple_seq_add_stmt (&seq, g);
2853 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2854 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2855 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2856 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2857 gimple_seq_add_stmt (&seq, g);
2858 g = gimple_build_label (lab1);
2859 gimple_seq_add_stmt (&seq, g);
2860 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2861 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2862 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2863 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2864 gimple_omp_for_set_clauses (new_stmt, clause);
2865 gimple_seq_add_stmt (&seq, new_stmt);
2866 g = gimple_build_goto (lab3);
2867 gimple_seq_add_stmt (&seq, g);
2868 g = gimple_build_label (lab2);
2869 gimple_seq_add_stmt (&seq, g);
2870 gimple_seq_add_stmt (&seq, stmt);
2871 g = gimple_build_label (lab3);
2872 gimple_seq_add_stmt (&seq, g);
2873 gimple_bind_set_body (bind, seq);
2874 update_stmt (bind);
2875 scan_omp_for (new_stmt, outer_ctx);
2876 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2879 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2880 struct walk_stmt_info *);
2881 static omp_context *maybe_lookup_ctx (gimple *);
2883 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2884 for scan phase loop. */
2886 static void
2887 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2888 omp_context *outer_ctx)
2890 /* The only change between inclusive and exclusive scan will be
2891 within the first simd loop, so just use inclusive in the
2892 worksharing loop. */
2893 outer_ctx->scan_inclusive = true;
2894 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2895 OMP_CLAUSE_DECL (c) = integer_zero_node;
2897 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2898 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2899 gsi_replace (gsi, input_stmt, false);
2900 gimple_seq input_body = NULL;
2901 gimple_seq_add_stmt (&input_body, stmt);
2902 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2904 gimple_stmt_iterator input1_gsi = gsi_none ();
2905 struct walk_stmt_info wi;
2906 memset (&wi, 0, sizeof (wi));
2907 wi.val_only = true;
2908 wi.info = (void *) &input1_gsi;
2909 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2910 gcc_assert (!gsi_end_p (input1_gsi));
2912 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2913 gsi_next (&input1_gsi);
2914 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2915 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2916 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2917 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2918 std::swap (input_stmt1, scan_stmt1);
2920 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2921 gimple_omp_set_body (input_stmt1, NULL);
2923 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2924 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2926 gimple_omp_set_body (input_stmt1, input_body1);
2927 gimple_omp_set_body (scan_stmt1, NULL);
2929 gimple_stmt_iterator input2_gsi = gsi_none ();
2930 memset (&wi, 0, sizeof (wi));
2931 wi.val_only = true;
2932 wi.info = (void *) &input2_gsi;
2933 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2934 NULL, &wi);
2935 gcc_assert (!gsi_end_p (input2_gsi));
2937 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2938 gsi_next (&input2_gsi);
2939 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2940 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2941 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2942 std::swap (input_stmt2, scan_stmt2);
2944 gimple_omp_set_body (input_stmt2, NULL);
2946 gimple_omp_set_body (input_stmt, input_body);
2947 gimple_omp_set_body (scan_stmt, scan_body);
2949 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2950 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2952 ctx = new_omp_context (scan_stmt, outer_ctx);
2953 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2955 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
2958 /* Scan an OpenMP sections directive. */
2960 static void
2961 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2963 omp_context *ctx;
2965 ctx = new_omp_context (stmt, outer_ctx);
2966 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2967 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2970 /* Scan an OpenMP single directive. */
2972 static void
2973 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2975 omp_context *ctx;
2976 tree name;
2978 ctx = new_omp_context (stmt, outer_ctx);
2979 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2980 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2981 name = create_tmp_var_name (".omp_copy_s");
2982 name = build_decl (gimple_location (stmt),
2983 TYPE_DECL, name, ctx->record_type);
2984 TYPE_NAME (ctx->record_type) = name;
2986 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2987 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2989 if (TYPE_FIELDS (ctx->record_type) == NULL)
2990 ctx->record_type = NULL;
2991 else
2992 layout_type (ctx->record_type);
2995 /* Scan a GIMPLE_OMP_TARGET. */
2997 static void
2998 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
3000 omp_context *ctx;
3001 tree name;
3002 bool offloaded = is_gimple_omp_offloaded (stmt);
3003 tree clauses = gimple_omp_target_clauses (stmt);
3005 ctx = new_omp_context (stmt, outer_ctx);
3006 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3007 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3008 name = create_tmp_var_name (".omp_data_t");
3009 name = build_decl (gimple_location (stmt),
3010 TYPE_DECL, name, ctx->record_type);
3011 DECL_ARTIFICIAL (name) = 1;
3012 DECL_NAMELESS (name) = 1;
3013 TYPE_NAME (ctx->record_type) = name;
3014 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3016 if (offloaded)
3018 create_omp_child_function (ctx, false);
3019 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
3022 scan_sharing_clauses (clauses, ctx);
3023 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3025 if (TYPE_FIELDS (ctx->record_type) == NULL)
3026 ctx->record_type = ctx->receiver_decl = NULL;
3027 else
3029 TYPE_FIELDS (ctx->record_type)
3030 = nreverse (TYPE_FIELDS (ctx->record_type));
3031 if (flag_checking)
3033 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
3034 for (tree field = TYPE_FIELDS (ctx->record_type);
3035 field;
3036 field = DECL_CHAIN (field))
3037 gcc_assert (DECL_ALIGN (field) == align);
3039 layout_type (ctx->record_type);
3040 if (offloaded)
3041 fixup_child_record_type (ctx);
3044 if (ctx->teams_nested_p && ctx->nonteams_nested_p)
3046 error_at (gimple_location (stmt),
3047 "%<target%> construct with nested %<teams%> construct "
3048 "contains directives outside of the %<teams%> construct");
3049 gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
3053 /* Scan an OpenMP teams directive. */
3055 static void
3056 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
3058 omp_context *ctx = new_omp_context (stmt, outer_ctx);
3060 if (!gimple_omp_teams_host (stmt))
3062 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3063 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3064 return;
3066 taskreg_contexts.safe_push (ctx);
3067 gcc_assert (taskreg_nesting_level == 1);
3068 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3069 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3070 tree name = create_tmp_var_name (".omp_data_s");
3071 name = build_decl (gimple_location (stmt),
3072 TYPE_DECL, name, ctx->record_type);
3073 DECL_ARTIFICIAL (name) = 1;
3074 DECL_NAMELESS (name) = 1;
3075 TYPE_NAME (ctx->record_type) = name;
3076 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3077 create_omp_child_function (ctx, false);
3078 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
3080 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3081 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3083 if (TYPE_FIELDS (ctx->record_type) == NULL)
3084 ctx->record_type = ctx->receiver_decl = NULL;
3087 /* Check nesting restrictions. */
3088 static bool
3089 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
3091 tree c;
3093 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3094 inside an OpenACC CTX. */
3095 if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3096 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
3097 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3099 else if (!(is_gimple_omp (stmt)
3100 && is_gimple_omp_oacc (stmt)))
3102 if (oacc_get_fn_attrib (cfun->decl) != NULL)
3104 error_at (gimple_location (stmt),
3105 "non-OpenACC construct inside of OpenACC routine");
3106 return false;
3108 else
3109 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
3110 if (is_gimple_omp (octx->stmt)
3111 && is_gimple_omp_oacc (octx->stmt))
3113 error_at (gimple_location (stmt),
3114 "non-OpenACC construct inside of OpenACC region");
3115 return false;
3119 if (ctx != NULL)
3121 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
3122 && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
3124 c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
3125 OMP_CLAUSE_DEVICE);
3126 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3128 error_at (gimple_location (stmt),
3129 "OpenMP constructs are not allowed in target region "
3130 "with %<ancestor%>");
3131 return false;
3134 if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
3135 ctx->teams_nested_p = true;
3136 else
3137 ctx->nonteams_nested_p = true;
3139 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
3140 && ctx->outer
3141 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
3142 ctx = ctx->outer;
3143 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3144 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3145 && !ctx->loop_p)
3147 c = NULL_TREE;
3148 if (ctx->order_concurrent
3149 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
3150 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3151 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3153 error_at (gimple_location (stmt),
3154 "OpenMP constructs other than %<parallel%>, %<loop%>"
3155 " or %<simd%> may not be nested inside a region with"
3156 " the %<order(concurrent)%> clause");
3157 return false;
3159 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
3161 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3162 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3164 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
3165 && (ctx->outer == NULL
3166 || !gimple_omp_for_combined_into_p (ctx->stmt)
3167 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
3168 || (gimple_omp_for_kind (ctx->outer->stmt)
3169 != GF_OMP_FOR_KIND_FOR)
3170 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
3172 error_at (gimple_location (stmt),
3173 "%<ordered simd threads%> must be closely "
3174 "nested inside of %<%s simd%> region",
3175 lang_GNU_Fortran () ? "do" : "for");
3176 return false;
3178 return true;
3181 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3182 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
3183 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
3184 return true;
3185 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
3186 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3187 return true;
3188 error_at (gimple_location (stmt),
3189 "OpenMP constructs other than "
3190 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3191 "not be nested inside %<simd%> region");
3192 return false;
3194 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3196 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
3197 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
3198 && omp_find_clause (gimple_omp_for_clauses (stmt),
3199 OMP_CLAUSE_BIND) == NULL_TREE))
3200 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
3202 error_at (gimple_location (stmt),
3203 "only %<distribute%>, %<parallel%> or %<loop%> "
3204 "regions are allowed to be strictly nested inside "
3205 "%<teams%> region");
3206 return false;
3209 else if (ctx->order_concurrent
3210 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3211 && (gimple_code (stmt) != GIMPLE_OMP_FOR
3212 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3213 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3215 if (ctx->loop_p)
3216 error_at (gimple_location (stmt),
3217 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3218 "%<simd%> may not be nested inside a %<loop%> region");
3219 else
3220 error_at (gimple_location (stmt),
3221 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3222 "%<simd%> may not be nested inside a region with "
3223 "the %<order(concurrent)%> clause");
3224 return false;
3227 switch (gimple_code (stmt))
3229 case GIMPLE_OMP_FOR:
3230 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3231 return true;
3232 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3234 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3236 error_at (gimple_location (stmt),
3237 "%<distribute%> region must be strictly nested "
3238 "inside %<teams%> construct");
3239 return false;
3241 return true;
3243 /* We split taskloop into task and nested taskloop in it. */
3244 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3245 return true;
3246 /* For now, hope this will change and loop bind(parallel) will not
3247 be allowed in lots of contexts. */
3248 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3249 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3250 return true;
3251 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3253 bool ok = false;
3255 if (ctx)
3256 switch (gimple_code (ctx->stmt))
3258 case GIMPLE_OMP_FOR:
3259 ok = (gimple_omp_for_kind (ctx->stmt)
3260 == GF_OMP_FOR_KIND_OACC_LOOP);
3261 break;
3263 case GIMPLE_OMP_TARGET:
3264 switch (gimple_omp_target_kind (ctx->stmt))
3266 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3267 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3268 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3269 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3270 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3271 ok = true;
3272 break;
3274 default:
3275 break;
3278 default:
3279 break;
3281 else if (oacc_get_fn_attrib (current_function_decl))
3282 ok = true;
3283 if (!ok)
3285 error_at (gimple_location (stmt),
3286 "OpenACC loop directive must be associated with"
3287 " an OpenACC compute region");
3288 return false;
3291 /* FALLTHRU */
3292 case GIMPLE_CALL:
3293 if (is_gimple_call (stmt)
3294 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3295 == BUILT_IN_GOMP_CANCEL
3296 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3297 == BUILT_IN_GOMP_CANCELLATION_POINT))
3299 const char *bad = NULL;
3300 const char *kind = NULL;
3301 const char *construct
3302 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3303 == BUILT_IN_GOMP_CANCEL)
3304 ? "cancel"
3305 : "cancellation point";
3306 if (ctx == NULL)
3308 error_at (gimple_location (stmt), "orphaned %qs construct",
3309 construct);
3310 return false;
3312 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3313 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3314 : 0)
3316 case 1:
3317 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3318 bad = "parallel";
3319 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3320 == BUILT_IN_GOMP_CANCEL
3321 && !integer_zerop (gimple_call_arg (stmt, 1)))
3322 ctx->cancellable = true;
3323 kind = "parallel";
3324 break;
3325 case 2:
3326 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3327 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3328 bad = "for";
3329 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3330 == BUILT_IN_GOMP_CANCEL
3331 && !integer_zerop (gimple_call_arg (stmt, 1)))
3333 ctx->cancellable = true;
3334 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3335 OMP_CLAUSE_NOWAIT))
3336 warning_at (gimple_location (stmt), 0,
3337 "%<cancel for%> inside "
3338 "%<nowait%> for construct");
3339 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3340 OMP_CLAUSE_ORDERED))
3341 warning_at (gimple_location (stmt), 0,
3342 "%<cancel for%> inside "
3343 "%<ordered%> for construct");
3345 kind = "for";
3346 break;
3347 case 4:
3348 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3349 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3350 bad = "sections";
3351 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3352 == BUILT_IN_GOMP_CANCEL
3353 && !integer_zerop (gimple_call_arg (stmt, 1)))
3355 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3357 ctx->cancellable = true;
3358 if (omp_find_clause (gimple_omp_sections_clauses
3359 (ctx->stmt),
3360 OMP_CLAUSE_NOWAIT))
3361 warning_at (gimple_location (stmt), 0,
3362 "%<cancel sections%> inside "
3363 "%<nowait%> sections construct");
3365 else
3367 gcc_assert (ctx->outer
3368 && gimple_code (ctx->outer->stmt)
3369 == GIMPLE_OMP_SECTIONS);
3370 ctx->outer->cancellable = true;
3371 if (omp_find_clause (gimple_omp_sections_clauses
3372 (ctx->outer->stmt),
3373 OMP_CLAUSE_NOWAIT))
3374 warning_at (gimple_location (stmt), 0,
3375 "%<cancel sections%> inside "
3376 "%<nowait%> sections construct");
3379 kind = "sections";
3380 break;
3381 case 8:
3382 if (!is_task_ctx (ctx)
3383 && (!is_taskloop_ctx (ctx)
3384 || ctx->outer == NULL
3385 || !is_task_ctx (ctx->outer)))
3386 bad = "task";
3387 else
3389 for (omp_context *octx = ctx->outer;
3390 octx; octx = octx->outer)
3392 switch (gimple_code (octx->stmt))
3394 case GIMPLE_OMP_TASKGROUP:
3395 break;
3396 case GIMPLE_OMP_TARGET:
3397 if (gimple_omp_target_kind (octx->stmt)
3398 != GF_OMP_TARGET_KIND_REGION)
3399 continue;
3400 /* FALLTHRU */
3401 case GIMPLE_OMP_PARALLEL:
3402 case GIMPLE_OMP_TEAMS:
3403 error_at (gimple_location (stmt),
3404 "%<%s taskgroup%> construct not closely "
3405 "nested inside of %<taskgroup%> region",
3406 construct);
3407 return false;
3408 case GIMPLE_OMP_TASK:
3409 if (gimple_omp_task_taskloop_p (octx->stmt)
3410 && octx->outer
3411 && is_taskloop_ctx (octx->outer))
3413 tree clauses
3414 = gimple_omp_for_clauses (octx->outer->stmt);
3415 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3416 break;
3418 continue;
3419 default:
3420 continue;
3422 break;
3424 ctx->cancellable = true;
3426 kind = "taskgroup";
3427 break;
3428 default:
3429 error_at (gimple_location (stmt), "invalid arguments");
3430 return false;
3432 if (bad)
3434 error_at (gimple_location (stmt),
3435 "%<%s %s%> construct not closely nested inside of %qs",
3436 construct, kind, bad);
3437 return false;
3440 /* FALLTHRU */
3441 case GIMPLE_OMP_SECTIONS:
3442 case GIMPLE_OMP_SINGLE:
3443 for (; ctx != NULL; ctx = ctx->outer)
3444 switch (gimple_code (ctx->stmt))
3446 case GIMPLE_OMP_FOR:
3447 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3448 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3449 break;
3450 /* FALLTHRU */
3451 case GIMPLE_OMP_SECTIONS:
3452 case GIMPLE_OMP_SINGLE:
3453 case GIMPLE_OMP_ORDERED:
3454 case GIMPLE_OMP_MASTER:
3455 case GIMPLE_OMP_MASKED:
3456 case GIMPLE_OMP_TASK:
3457 case GIMPLE_OMP_CRITICAL:
3458 if (is_gimple_call (stmt))
3460 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3461 != BUILT_IN_GOMP_BARRIER)
3462 return true;
3463 error_at (gimple_location (stmt),
3464 "barrier region may not be closely nested inside "
3465 "of work-sharing, %<loop%>, %<critical%>, "
3466 "%<ordered%>, %<master%>, %<masked%>, explicit "
3467 "%<task%> or %<taskloop%> region");
3468 return false;
3470 error_at (gimple_location (stmt),
3471 "work-sharing region may not be closely nested inside "
3472 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3473 "%<master%>, %<masked%>, explicit %<task%> or "
3474 "%<taskloop%> region");
3475 return false;
3476 case GIMPLE_OMP_PARALLEL:
3477 case GIMPLE_OMP_TEAMS:
3478 return true;
3479 case GIMPLE_OMP_TARGET:
3480 if (gimple_omp_target_kind (ctx->stmt)
3481 == GF_OMP_TARGET_KIND_REGION)
3482 return true;
3483 break;
3484 default:
3485 break;
3487 break;
3488 case GIMPLE_OMP_MASTER:
3489 case GIMPLE_OMP_MASKED:
3490 for (; ctx != NULL; ctx = ctx->outer)
3491 switch (gimple_code (ctx->stmt))
3493 case GIMPLE_OMP_FOR:
3494 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3495 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3496 break;
3497 /* FALLTHRU */
3498 case GIMPLE_OMP_SECTIONS:
3499 case GIMPLE_OMP_SINGLE:
3500 case GIMPLE_OMP_TASK:
3501 error_at (gimple_location (stmt),
3502 "%qs region may not be closely nested inside "
3503 "of work-sharing, %<loop%>, explicit %<task%> or "
3504 "%<taskloop%> region",
3505 gimple_code (stmt) == GIMPLE_OMP_MASTER
3506 ? "master" : "masked");
3507 return false;
3508 case GIMPLE_OMP_PARALLEL:
3509 case GIMPLE_OMP_TEAMS:
3510 return true;
3511 case GIMPLE_OMP_TARGET:
3512 if (gimple_omp_target_kind (ctx->stmt)
3513 == GF_OMP_TARGET_KIND_REGION)
3514 return true;
3515 break;
3516 default:
3517 break;
3519 break;
3520 case GIMPLE_OMP_SCOPE:
3521 for (; ctx != NULL; ctx = ctx->outer)
3522 switch (gimple_code (ctx->stmt))
3524 case GIMPLE_OMP_FOR:
3525 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3526 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3527 break;
3528 /* FALLTHRU */
3529 case GIMPLE_OMP_SECTIONS:
3530 case GIMPLE_OMP_SINGLE:
3531 case GIMPLE_OMP_TASK:
3532 case GIMPLE_OMP_CRITICAL:
3533 case GIMPLE_OMP_ORDERED:
3534 case GIMPLE_OMP_MASTER:
3535 case GIMPLE_OMP_MASKED:
3536 error_at (gimple_location (stmt),
3537 "%<scope%> region may not be closely nested inside "
3538 "of work-sharing, %<loop%>, explicit %<task%>, "
3539 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3540 "or %<masked%> region");
3541 return false;
3542 case GIMPLE_OMP_PARALLEL:
3543 case GIMPLE_OMP_TEAMS:
3544 return true;
3545 case GIMPLE_OMP_TARGET:
3546 if (gimple_omp_target_kind (ctx->stmt)
3547 == GF_OMP_TARGET_KIND_REGION)
3548 return true;
3549 break;
3550 default:
3551 break;
3553 break;
3554 case GIMPLE_OMP_TASK:
3555 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3556 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3557 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3558 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3560 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3561 error_at (OMP_CLAUSE_LOCATION (c),
3562 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3563 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3564 return false;
3566 break;
3567 case GIMPLE_OMP_ORDERED:
3568 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3569 c; c = OMP_CLAUSE_CHAIN (c))
3571 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3573 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3574 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3575 continue;
3577 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3578 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3579 || kind == OMP_CLAUSE_DEPEND_SINK)
3581 tree oclause;
3582 /* Look for containing ordered(N) loop. */
3583 if (ctx == NULL
3584 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3585 || (oclause
3586 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3587 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3589 error_at (OMP_CLAUSE_LOCATION (c),
3590 "%<ordered%> construct with %<depend%> clause "
3591 "must be closely nested inside an %<ordered%> "
3592 "loop");
3593 return false;
3595 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3597 error_at (OMP_CLAUSE_LOCATION (c),
3598 "%<ordered%> construct with %<depend%> clause "
3599 "must be closely nested inside a loop with "
3600 "%<ordered%> clause with a parameter");
3601 return false;
3604 else
3606 error_at (OMP_CLAUSE_LOCATION (c),
3607 "invalid depend kind in omp %<ordered%> %<depend%>");
3608 return false;
3611 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3612 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3614 /* ordered simd must be closely nested inside of simd region,
3615 and simd region must not encounter constructs other than
3616 ordered simd, therefore ordered simd may be either orphaned,
3617 or ctx->stmt must be simd. The latter case is handled already
3618 earlier. */
3619 if (ctx != NULL)
3621 error_at (gimple_location (stmt),
3622 "%<ordered%> %<simd%> must be closely nested inside "
3623 "%<simd%> region");
3624 return false;
3627 for (; ctx != NULL; ctx = ctx->outer)
3628 switch (gimple_code (ctx->stmt))
3630 case GIMPLE_OMP_CRITICAL:
3631 case GIMPLE_OMP_TASK:
3632 case GIMPLE_OMP_ORDERED:
3633 ordered_in_taskloop:
3634 error_at (gimple_location (stmt),
3635 "%<ordered%> region may not be closely nested inside "
3636 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3637 "%<taskloop%> region");
3638 return false;
3639 case GIMPLE_OMP_FOR:
3640 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3641 goto ordered_in_taskloop;
3642 tree o;
3643 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3644 OMP_CLAUSE_ORDERED);
3645 if (o == NULL)
3647 error_at (gimple_location (stmt),
3648 "%<ordered%> region must be closely nested inside "
3649 "a loop region with an %<ordered%> clause");
3650 return false;
3652 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3653 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3655 error_at (gimple_location (stmt),
3656 "%<ordered%> region without %<depend%> clause may "
3657 "not be closely nested inside a loop region with "
3658 "an %<ordered%> clause with a parameter");
3659 return false;
3661 return true;
3662 case GIMPLE_OMP_TARGET:
3663 if (gimple_omp_target_kind (ctx->stmt)
3664 != GF_OMP_TARGET_KIND_REGION)
3665 break;
3666 /* FALLTHRU */
3667 case GIMPLE_OMP_PARALLEL:
3668 case GIMPLE_OMP_TEAMS:
3669 error_at (gimple_location (stmt),
3670 "%<ordered%> region must be closely nested inside "
3671 "a loop region with an %<ordered%> clause");
3672 return false;
3673 default:
3674 break;
3676 break;
3677 case GIMPLE_OMP_CRITICAL:
3679 tree this_stmt_name
3680 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3681 for (; ctx != NULL; ctx = ctx->outer)
3682 if (gomp_critical *other_crit
3683 = dyn_cast <gomp_critical *> (ctx->stmt))
3684 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3686 error_at (gimple_location (stmt),
3687 "%<critical%> region may not be nested inside "
3688 "a %<critical%> region with the same name");
3689 return false;
3692 break;
3693 case GIMPLE_OMP_TEAMS:
3694 if (ctx == NULL)
3695 break;
3696 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3697 || (gimple_omp_target_kind (ctx->stmt)
3698 != GF_OMP_TARGET_KIND_REGION))
3700 /* Teams construct can appear either strictly nested inside of
3701 target construct with no intervening stmts, or can be encountered
3702 only by initial task (so must not appear inside any OpenMP
3703 construct. */
3704 error_at (gimple_location (stmt),
3705 "%<teams%> construct must be closely nested inside of "
3706 "%<target%> construct or not nested in any OpenMP "
3707 "construct");
3708 return false;
3710 break;
3711 case GIMPLE_OMP_TARGET:
3712 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3713 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3714 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3715 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3717 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3718 error_at (OMP_CLAUSE_LOCATION (c),
3719 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3720 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3721 return false;
3723 if (is_gimple_omp_offloaded (stmt)
3724 && oacc_get_fn_attrib (cfun->decl) != NULL)
3726 error_at (gimple_location (stmt),
3727 "OpenACC region inside of OpenACC routine, nested "
3728 "parallelism not supported yet");
3729 return false;
3731 for (; ctx != NULL; ctx = ctx->outer)
3733 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3735 if (is_gimple_omp (stmt)
3736 && is_gimple_omp_oacc (stmt)
3737 && is_gimple_omp (ctx->stmt))
3739 error_at (gimple_location (stmt),
3740 "OpenACC construct inside of non-OpenACC region");
3741 return false;
3743 continue;
3746 const char *stmt_name, *ctx_stmt_name;
3747 switch (gimple_omp_target_kind (stmt))
3749 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3750 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3751 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3752 case GF_OMP_TARGET_KIND_ENTER_DATA:
3753 stmt_name = "target enter data"; break;
3754 case GF_OMP_TARGET_KIND_EXIT_DATA:
3755 stmt_name = "target exit data"; break;
3756 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3757 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3758 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3759 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3760 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3761 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
3762 stmt_name = "enter data"; break;
3763 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
3764 stmt_name = "exit data"; break;
3765 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3766 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3767 break;
3768 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3769 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3770 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3771 /* OpenACC 'kernels' decomposed parts. */
3772 stmt_name = "kernels"; break;
3773 default: gcc_unreachable ();
3775 switch (gimple_omp_target_kind (ctx->stmt))
3777 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3778 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3779 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3780 ctx_stmt_name = "parallel"; break;
3781 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3782 ctx_stmt_name = "kernels"; break;
3783 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3784 ctx_stmt_name = "serial"; break;
3785 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3786 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3787 ctx_stmt_name = "host_data"; break;
3788 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3789 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3790 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3791 /* OpenACC 'kernels' decomposed parts. */
3792 ctx_stmt_name = "kernels"; break;
3793 default: gcc_unreachable ();
3796 /* OpenACC/OpenMP mismatch? */
3797 if (is_gimple_omp_oacc (stmt)
3798 != is_gimple_omp_oacc (ctx->stmt))
3800 error_at (gimple_location (stmt),
3801 "%s %qs construct inside of %s %qs region",
3802 (is_gimple_omp_oacc (stmt)
3803 ? "OpenACC" : "OpenMP"), stmt_name,
3804 (is_gimple_omp_oacc (ctx->stmt)
3805 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3806 return false;
3808 if (is_gimple_omp_offloaded (ctx->stmt))
3810 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3811 if (is_gimple_omp_oacc (ctx->stmt))
3813 error_at (gimple_location (stmt),
3814 "%qs construct inside of %qs region",
3815 stmt_name, ctx_stmt_name);
3816 return false;
3818 else
3820 warning_at (gimple_location (stmt), 0,
3821 "%qs construct inside of %qs region",
3822 stmt_name, ctx_stmt_name);
3826 break;
3827 default:
3828 break;
3830 return true;
3834 /* Helper function scan_omp.
3836 Callback for walk_tree or operators in walk_gimple_stmt used to
3837 scan for OMP directives in TP. */
3839 static tree
3840 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3842 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3843 omp_context *ctx = (omp_context *) wi->info;
3844 tree t = *tp;
3846 switch (TREE_CODE (t))
3848 case VAR_DECL:
3849 case PARM_DECL:
3850 case LABEL_DECL:
3851 case RESULT_DECL:
3852 if (ctx)
3854 tree repl = remap_decl (t, &ctx->cb);
3855 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3856 *tp = repl;
3858 break;
3860 default:
3861 if (ctx && TYPE_P (t))
3862 *tp = remap_type (t, &ctx->cb);
3863 else if (!DECL_P (t))
3865 *walk_subtrees = 1;
3866 if (ctx)
3868 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3869 if (tem != TREE_TYPE (t))
3871 if (TREE_CODE (t) == INTEGER_CST)
3872 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3873 else
3874 TREE_TYPE (t) = tem;
3878 break;
3881 return NULL_TREE;
3884 /* Return true if FNDECL is a setjmp or a longjmp. */
3886 static bool
3887 setjmp_or_longjmp_p (const_tree fndecl)
3889 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3890 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3891 return true;
3893 tree declname = DECL_NAME (fndecl);
3894 if (!declname
3895 || (DECL_CONTEXT (fndecl) != NULL_TREE
3896 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3897 || !TREE_PUBLIC (fndecl))
3898 return false;
3900 const char *name = IDENTIFIER_POINTER (declname);
3901 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3904 /* Return true if FNDECL is an omp_* runtime API call. */
3906 static bool
3907 omp_runtime_api_call (const_tree fndecl)
3909 tree declname = DECL_NAME (fndecl);
3910 if (!declname
3911 || (DECL_CONTEXT (fndecl) != NULL_TREE
3912 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3913 || !TREE_PUBLIC (fndecl))
3914 return false;
3916 const char *name = IDENTIFIER_POINTER (declname);
3917 if (!startswith (name, "omp_"))
3918 return false;
3920 static const char *omp_runtime_apis[] =
3922 /* This array has 3 sections. First omp_* calls that don't
3923 have any suffixes. */
3924 "omp_alloc",
3925 "omp_free",
3926 "target_alloc",
3927 "target_associate_ptr",
3928 "target_disassociate_ptr",
3929 "target_free",
3930 "target_is_present",
3931 "target_memcpy",
3932 "target_memcpy_rect",
3933 NULL,
3934 /* Now omp_* calls that are available as omp_* and omp_*_. */
3935 "capture_affinity",
3936 "destroy_allocator",
3937 "destroy_lock",
3938 "destroy_nest_lock",
3939 "display_affinity",
3940 "fulfill_event",
3941 "get_active_level",
3942 "get_affinity_format",
3943 "get_cancellation",
3944 "get_default_allocator",
3945 "get_default_device",
3946 "get_device_num",
3947 "get_dynamic",
3948 "get_initial_device",
3949 "get_level",
3950 "get_max_active_levels",
3951 "get_max_task_priority",
3952 "get_max_threads",
3953 "get_nested",
3954 "get_num_devices",
3955 "get_num_places",
3956 "get_num_procs",
3957 "get_num_teams",
3958 "get_num_threads",
3959 "get_partition_num_places",
3960 "get_place_num",
3961 "get_proc_bind",
3962 "get_supported_active_levels",
3963 "get_team_num",
3964 "get_thread_limit",
3965 "get_thread_num",
3966 "get_wtick",
3967 "get_wtime",
3968 "in_final",
3969 "in_parallel",
3970 "init_lock",
3971 "init_nest_lock",
3972 "is_initial_device",
3973 "pause_resource",
3974 "pause_resource_all",
3975 "set_affinity_format",
3976 "set_default_allocator",
3977 "set_lock",
3978 "set_nest_lock",
3979 "test_lock",
3980 "test_nest_lock",
3981 "unset_lock",
3982 "unset_nest_lock",
3983 NULL,
3984 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3985 "display_env",
3986 "get_ancestor_thread_num",
3987 "init_allocator",
3988 "get_partition_place_nums",
3989 "get_place_num_procs",
3990 "get_place_proc_ids",
3991 "get_schedule",
3992 "get_team_size",
3993 "set_default_device",
3994 "set_dynamic",
3995 "set_max_active_levels",
3996 "set_nested",
3997 "set_num_threads",
3998 "set_schedule"
4001 int mode = 0;
4002 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
4004 if (omp_runtime_apis[i] == NULL)
4006 mode++;
4007 continue;
4009 size_t len = strlen (omp_runtime_apis[i]);
4010 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
4011 && (name[4 + len] == '\0'
4012 || (mode > 0
4013 && name[4 + len] == '_'
4014 && (name[4 + len + 1] == '\0'
4015 || (mode > 1
4016 && strcmp (name + 4 + len + 1, "8_") == 0)))))
4017 return true;
4019 return false;
4022 /* Helper function for scan_omp.
4024 Callback for walk_gimple_stmt used to scan for OMP directives in
4025 the current statement in GSI. */
4027 static tree
4028 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
4029 struct walk_stmt_info *wi)
4031 gimple *stmt = gsi_stmt (*gsi);
4032 omp_context *ctx = (omp_context *) wi->info;
4034 if (gimple_has_location (stmt))
4035 input_location = gimple_location (stmt);
4037 /* Check the nesting restrictions. */
4038 bool remove = false;
4039 if (is_gimple_omp (stmt))
4040 remove = !check_omp_nesting_restrictions (stmt, ctx);
4041 else if (is_gimple_call (stmt))
4043 tree fndecl = gimple_call_fndecl (stmt);
4044 if (fndecl)
4046 if (ctx
4047 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4048 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
4049 && setjmp_or_longjmp_p (fndecl)
4050 && !ctx->loop_p)
4052 remove = true;
4053 error_at (gimple_location (stmt),
4054 "setjmp/longjmp inside %<simd%> construct");
4056 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4057 switch (DECL_FUNCTION_CODE (fndecl))
4059 case BUILT_IN_GOMP_BARRIER:
4060 case BUILT_IN_GOMP_CANCEL:
4061 case BUILT_IN_GOMP_CANCELLATION_POINT:
4062 case BUILT_IN_GOMP_TASKYIELD:
4063 case BUILT_IN_GOMP_TASKWAIT:
4064 case BUILT_IN_GOMP_TASKGROUP_START:
4065 case BUILT_IN_GOMP_TASKGROUP_END:
4066 remove = !check_omp_nesting_restrictions (stmt, ctx);
4067 break;
4068 default:
4069 break;
4071 else if (ctx)
4073 omp_context *octx = ctx;
4074 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
4075 octx = ctx->outer;
4076 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
4078 remove = true;
4079 error_at (gimple_location (stmt),
4080 "OpenMP runtime API call %qD in a region with "
4081 "%<order(concurrent)%> clause", fndecl);
4083 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
4084 && (gimple_omp_target_kind (ctx->stmt)
4085 == GF_OMP_TARGET_KIND_REGION))
4087 tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
4088 tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
4089 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
4090 error_at (gimple_location (stmt),
4091 "OpenMP runtime API call %qD in a region with "
4092 "%<device(ancestor)%> clause", fndecl);
4097 if (remove)
4099 stmt = gimple_build_nop ();
4100 gsi_replace (gsi, stmt, false);
4103 *handled_ops_p = true;
4105 switch (gimple_code (stmt))
4107 case GIMPLE_OMP_PARALLEL:
4108 taskreg_nesting_level++;
4109 scan_omp_parallel (gsi, ctx);
4110 taskreg_nesting_level--;
4111 break;
4113 case GIMPLE_OMP_TASK:
4114 taskreg_nesting_level++;
4115 scan_omp_task (gsi, ctx);
4116 taskreg_nesting_level--;
4117 break;
4119 case GIMPLE_OMP_FOR:
4120 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4121 == GF_OMP_FOR_KIND_SIMD)
4122 && gimple_omp_for_combined_into_p (stmt)
4123 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
4125 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
4126 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
4127 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
4129 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
4130 break;
4133 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4134 == GF_OMP_FOR_KIND_SIMD)
4135 && omp_maybe_offloaded_ctx (ctx)
4136 && omp_max_simt_vf ()
4137 && gimple_omp_for_collapse (stmt) == 1)
4138 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
4139 else
4140 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
4141 break;
4143 case GIMPLE_OMP_SCOPE:
4144 ctx = new_omp_context (stmt, ctx);
4145 scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
4146 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4147 break;
4149 case GIMPLE_OMP_SECTIONS:
4150 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
4151 break;
4153 case GIMPLE_OMP_SINGLE:
4154 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
4155 break;
4157 case GIMPLE_OMP_SCAN:
4158 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
4160 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
4161 ctx->scan_inclusive = true;
4162 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
4163 ctx->scan_exclusive = true;
4165 /* FALLTHRU */
4166 case GIMPLE_OMP_SECTION:
4167 case GIMPLE_OMP_MASTER:
4168 case GIMPLE_OMP_ORDERED:
4169 case GIMPLE_OMP_CRITICAL:
4170 ctx = new_omp_context (stmt, ctx);
4171 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4172 break;
4174 case GIMPLE_OMP_MASKED:
4175 ctx = new_omp_context (stmt, ctx);
4176 scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
4177 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4178 break;
4180 case GIMPLE_OMP_TASKGROUP:
4181 ctx = new_omp_context (stmt, ctx);
4182 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
4183 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4184 break;
4186 case GIMPLE_OMP_TARGET:
4187 if (is_gimple_omp_offloaded (stmt))
4189 taskreg_nesting_level++;
4190 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4191 taskreg_nesting_level--;
4193 else
4194 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4195 break;
4197 case GIMPLE_OMP_TEAMS:
4198 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4200 taskreg_nesting_level++;
4201 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4202 taskreg_nesting_level--;
4204 else
4205 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4206 break;
4208 case GIMPLE_BIND:
4210 tree var;
4212 *handled_ops_p = false;
4213 if (ctx)
4214 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4215 var ;
4216 var = DECL_CHAIN (var))
4217 insert_decl_map (&ctx->cb, var, var);
4219 break;
4220 default:
4221 *handled_ops_p = false;
4222 break;
4225 return NULL_TREE;
4229 /* Scan all the statements starting at the current statement. CTX
4230 contains context information about the OMP directives and
4231 clauses found during the scan. */
4233 static void
4234 scan_omp (gimple_seq *body_p, omp_context *ctx)
4236 location_t saved_location;
4237 struct walk_stmt_info wi;
4239 memset (&wi, 0, sizeof (wi));
4240 wi.info = ctx;
4241 wi.want_locations = true;
4243 saved_location = input_location;
4244 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4245 input_location = saved_location;
4248 /* Re-gimplification and code generation routines. */
4250 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4251 of BIND if in a method. */
4253 static void
4254 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4256 if (DECL_ARGUMENTS (current_function_decl)
4257 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4258 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4259 == POINTER_TYPE))
4261 tree vars = gimple_bind_vars (bind);
4262 for (tree *pvar = &vars; *pvar; )
4263 if (omp_member_access_dummy_var (*pvar))
4264 *pvar = DECL_CHAIN (*pvar);
4265 else
4266 pvar = &DECL_CHAIN (*pvar);
4267 gimple_bind_set_vars (bind, vars);
4271 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4272 block and its subblocks. */
4274 static void
4275 remove_member_access_dummy_vars (tree block)
4277 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4278 if (omp_member_access_dummy_var (*pvar))
4279 *pvar = DECL_CHAIN (*pvar);
4280 else
4281 pvar = &DECL_CHAIN (*pvar);
4283 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4284 remove_member_access_dummy_vars (block);
4287 /* If a context was created for STMT when it was scanned, return it. */
4289 static omp_context *
4290 maybe_lookup_ctx (gimple *stmt)
4292 splay_tree_node n;
4293 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4294 return n ? (omp_context *) n->value : NULL;
4298 /* Find the mapping for DECL in CTX or the immediately enclosing
4299 context that has a mapping for DECL.
4301 If CTX is a nested parallel directive, we may have to use the decl
4302 mappings created in CTX's parent context. Suppose that we have the
4303 following parallel nesting (variable UIDs showed for clarity):
4305 iD.1562 = 0;
4306 #omp parallel shared(iD.1562) -> outer parallel
4307 iD.1562 = iD.1562 + 1;
4309 #omp parallel shared (iD.1562) -> inner parallel
4310 iD.1562 = iD.1562 - 1;
4312 Each parallel structure will create a distinct .omp_data_s structure
4313 for copying iD.1562 in/out of the directive:
4315 outer parallel .omp_data_s.1.i -> iD.1562
4316 inner parallel .omp_data_s.2.i -> iD.1562
4318 A shared variable mapping will produce a copy-out operation before
4319 the parallel directive and a copy-in operation after it. So, in
4320 this case we would have:
4322 iD.1562 = 0;
4323 .omp_data_o.1.i = iD.1562;
4324 #omp parallel shared(iD.1562) -> outer parallel
4325 .omp_data_i.1 = &.omp_data_o.1
4326 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4328 .omp_data_o.2.i = iD.1562; -> **
4329 #omp parallel shared(iD.1562) -> inner parallel
4330 .omp_data_i.2 = &.omp_data_o.2
4331 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4334 ** This is a problem. The symbol iD.1562 cannot be referenced
4335 inside the body of the outer parallel region. But since we are
4336 emitting this copy operation while expanding the inner parallel
4337 directive, we need to access the CTX structure of the outer
4338 parallel directive to get the correct mapping:
4340 .omp_data_o.2.i = .omp_data_i.1->i
4342 Since there may be other workshare or parallel directives enclosing
4343 the parallel directive, it may be necessary to walk up the context
4344 parent chain. This is not a problem in general because nested
4345 parallelism happens only rarely. */
4347 static tree
4348 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4350 tree t;
4351 omp_context *up;
4353 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4354 t = maybe_lookup_decl (decl, up);
4356 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4358 return t ? t : decl;
4362 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4363 in outer contexts. */
4365 static tree
4366 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4368 tree t = NULL;
4369 omp_context *up;
4371 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4372 t = maybe_lookup_decl (decl, up);
4374 return t ? t : decl;
4378 /* Construct the initialization value for reduction operation OP. */
4380 tree
4381 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4383 switch (op)
4385 case PLUS_EXPR:
4386 case MINUS_EXPR:
4387 case BIT_IOR_EXPR:
4388 case BIT_XOR_EXPR:
4389 case TRUTH_OR_EXPR:
4390 case TRUTH_ORIF_EXPR:
4391 case TRUTH_XOR_EXPR:
4392 case NE_EXPR:
4393 return build_zero_cst (type);
4395 case MULT_EXPR:
4396 case TRUTH_AND_EXPR:
4397 case TRUTH_ANDIF_EXPR:
4398 case EQ_EXPR:
4399 return fold_convert_loc (loc, type, integer_one_node);
4401 case BIT_AND_EXPR:
4402 return fold_convert_loc (loc, type, integer_minus_one_node);
4404 case MAX_EXPR:
4405 if (SCALAR_FLOAT_TYPE_P (type))
4407 REAL_VALUE_TYPE max, min;
4408 if (HONOR_INFINITIES (type))
4410 real_inf (&max);
4411 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4413 else
4414 real_maxval (&min, 1, TYPE_MODE (type));
4415 return build_real (type, min);
4417 else if (POINTER_TYPE_P (type))
4419 wide_int min
4420 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4421 return wide_int_to_tree (type, min);
4423 else
4425 gcc_assert (INTEGRAL_TYPE_P (type));
4426 return TYPE_MIN_VALUE (type);
4429 case MIN_EXPR:
4430 if (SCALAR_FLOAT_TYPE_P (type))
4432 REAL_VALUE_TYPE max;
4433 if (HONOR_INFINITIES (type))
4434 real_inf (&max);
4435 else
4436 real_maxval (&max, 0, TYPE_MODE (type));
4437 return build_real (type, max);
4439 else if (POINTER_TYPE_P (type))
4441 wide_int max
4442 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4443 return wide_int_to_tree (type, max);
4445 else
4447 gcc_assert (INTEGRAL_TYPE_P (type));
4448 return TYPE_MAX_VALUE (type);
4451 default:
4452 gcc_unreachable ();
4456 /* Construct the initialization value for reduction CLAUSE. */
4458 tree
4459 omp_reduction_init (tree clause, tree type)
4461 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4462 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4465 /* Return alignment to be assumed for var in CLAUSE, which should be
4466 OMP_CLAUSE_ALIGNED. */
4468 static tree
4469 omp_clause_aligned_alignment (tree clause)
4471 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4472 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4474 /* Otherwise return implementation defined alignment. */
4475 unsigned int al = 1;
4476 opt_scalar_mode mode_iter;
4477 auto_vector_modes modes;
4478 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4479 static enum mode_class classes[]
4480 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4481 for (int i = 0; i < 4; i += 2)
4482 /* The for loop above dictates that we only walk through scalar classes. */
4483 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4485 scalar_mode mode = mode_iter.require ();
4486 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4487 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4488 continue;
4489 machine_mode alt_vmode;
4490 for (unsigned int j = 0; j < modes.length (); ++j)
4491 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4492 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4493 vmode = alt_vmode;
4495 tree type = lang_hooks.types.type_for_mode (mode, 1);
4496 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4497 continue;
4498 type = build_vector_type_for_mode (type, vmode);
4499 if (TYPE_MODE (type) != vmode)
4500 continue;
4501 if (TYPE_ALIGN_UNIT (type) > al)
4502 al = TYPE_ALIGN_UNIT (type);
4504 return build_int_cst (integer_type_node, al);
4508 /* This structure is part of the interface between lower_rec_simd_input_clauses
4509 and lower_rec_input_clauses. */
4511 class omplow_simd_context {
4512 public:
4513 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4514 tree idx;
4515 tree lane;
4516 tree lastlane;
4517 vec<tree, va_heap> simt_eargs;
4518 gimple_seq simt_dlist;
4519 poly_uint64_pod max_vf;
4520 bool is_simt;
4523 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4524 privatization. */
4526 static bool
4527 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4528 omplow_simd_context *sctx, tree &ivar,
4529 tree &lvar, tree *rvar = NULL,
4530 tree *rvar2 = NULL)
4532 if (known_eq (sctx->max_vf, 0U))
4534 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4535 if (maybe_gt (sctx->max_vf, 1U))
4537 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4538 OMP_CLAUSE_SAFELEN);
4539 if (c)
4541 poly_uint64 safe_len;
4542 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4543 || maybe_lt (safe_len, 1U))
4544 sctx->max_vf = 1;
4545 else
4546 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4549 if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
4551 for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
4552 c = OMP_CLAUSE_CHAIN (c))
4554 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4555 continue;
4557 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4559 /* UDR reductions are not supported yet for SIMT, disable
4560 SIMT. */
4561 sctx->max_vf = 1;
4562 break;
4565 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
4566 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
4568 /* Doing boolean operations on non-integral types is
4569 for conformance only, it's not worth supporting this
4570 for SIMT. */
4571 sctx->max_vf = 1;
4572 break;
4576 if (maybe_gt (sctx->max_vf, 1U))
4578 sctx->idx = create_tmp_var (unsigned_type_node);
4579 sctx->lane = create_tmp_var (unsigned_type_node);
4582 if (known_eq (sctx->max_vf, 1U))
4583 return false;
4585 if (sctx->is_simt)
4587 if (is_gimple_reg (new_var))
4589 ivar = lvar = new_var;
4590 return true;
4592 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4593 ivar = lvar = create_tmp_var (type);
4594 TREE_ADDRESSABLE (ivar) = 1;
4595 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4596 NULL, DECL_ATTRIBUTES (ivar));
4597 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4598 tree clobber = build_clobber (type);
4599 gimple *g = gimple_build_assign (ivar, clobber);
4600 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4602 else
4604 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4605 tree avar = create_tmp_var_raw (atype);
4606 if (TREE_ADDRESSABLE (new_var))
4607 TREE_ADDRESSABLE (avar) = 1;
4608 DECL_ATTRIBUTES (avar)
4609 = tree_cons (get_identifier ("omp simd array"), NULL,
4610 DECL_ATTRIBUTES (avar));
4611 gimple_add_tmp_var (avar);
4612 tree iavar = avar;
4613 if (rvar && !ctx->for_simd_scan_phase)
4615 /* For inscan reductions, create another array temporary,
4616 which will hold the reduced value. */
4617 iavar = create_tmp_var_raw (atype);
4618 if (TREE_ADDRESSABLE (new_var))
4619 TREE_ADDRESSABLE (iavar) = 1;
4620 DECL_ATTRIBUTES (iavar)
4621 = tree_cons (get_identifier ("omp simd array"), NULL,
4622 tree_cons (get_identifier ("omp simd inscan"), NULL,
4623 DECL_ATTRIBUTES (iavar)));
4624 gimple_add_tmp_var (iavar);
4625 ctx->cb.decl_map->put (avar, iavar);
4626 if (sctx->lastlane == NULL_TREE)
4627 sctx->lastlane = create_tmp_var (unsigned_type_node);
4628 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4629 sctx->lastlane, NULL_TREE, NULL_TREE);
4630 TREE_THIS_NOTRAP (*rvar) = 1;
4632 if (ctx->scan_exclusive)
4634 /* And for exclusive scan yet another one, which will
4635 hold the value during the scan phase. */
4636 tree savar = create_tmp_var_raw (atype);
4637 if (TREE_ADDRESSABLE (new_var))
4638 TREE_ADDRESSABLE (savar) = 1;
4639 DECL_ATTRIBUTES (savar)
4640 = tree_cons (get_identifier ("omp simd array"), NULL,
4641 tree_cons (get_identifier ("omp simd inscan "
4642 "exclusive"), NULL,
4643 DECL_ATTRIBUTES (savar)));
4644 gimple_add_tmp_var (savar);
4645 ctx->cb.decl_map->put (iavar, savar);
4646 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4647 sctx->idx, NULL_TREE, NULL_TREE);
4648 TREE_THIS_NOTRAP (*rvar2) = 1;
4651 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4652 NULL_TREE, NULL_TREE);
4653 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4654 NULL_TREE, NULL_TREE);
4655 TREE_THIS_NOTRAP (ivar) = 1;
4656 TREE_THIS_NOTRAP (lvar) = 1;
4658 if (DECL_P (new_var))
4660 SET_DECL_VALUE_EXPR (new_var, lvar);
4661 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4663 return true;
4666 /* Helper function of lower_rec_input_clauses. For a reference
4667 in simd reduction, add an underlying variable it will reference. */
4669 static void
4670 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4672 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4673 if (TREE_CONSTANT (z))
4675 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4676 get_name (new_vard));
4677 gimple_add_tmp_var (z);
4678 TREE_ADDRESSABLE (z) = 1;
4679 z = build_fold_addr_expr_loc (loc, z);
4680 gimplify_assign (new_vard, z, ilist);
4684 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4685 code to emit (type) (tskred_temp[idx]). */
4687 static tree
4688 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4689 unsigned idx)
4691 unsigned HOST_WIDE_INT sz
4692 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4693 tree r = build2 (MEM_REF, pointer_sized_int_node,
4694 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4695 idx * sz));
4696 tree v = create_tmp_var (pointer_sized_int_node);
4697 gimple *g = gimple_build_assign (v, r);
4698 gimple_seq_add_stmt (ilist, g);
4699 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4701 v = create_tmp_var (type);
4702 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4703 gimple_seq_add_stmt (ilist, g);
4705 return v;
4708 /* Lower early initialization of privatized variable NEW_VAR
4709 if it needs an allocator (has allocate clause). */
4711 static bool
4712 lower_private_allocate (tree var, tree new_var, tree &allocator,
4713 tree &allocate_ptr, gimple_seq *ilist,
4714 omp_context *ctx, bool is_ref, tree size)
4716 if (allocator)
4717 return false;
4718 gcc_assert (allocate_ptr == NULL_TREE);
4719 if (ctx->allocate_map
4720 && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
4721 if (tree *allocatorp = ctx->allocate_map->get (var))
4722 allocator = *allocatorp;
4723 if (allocator == NULL_TREE)
4724 return false;
4725 if (!is_ref && omp_privatize_by_reference (var))
4727 allocator = NULL_TREE;
4728 return false;
4731 unsigned HOST_WIDE_INT ialign = 0;
4732 if (TREE_CODE (allocator) == TREE_LIST)
4734 ialign = tree_to_uhwi (TREE_VALUE (allocator));
4735 allocator = TREE_PURPOSE (allocator);
4737 if (TREE_CODE (allocator) != INTEGER_CST)
4738 allocator = build_outer_var_ref (allocator, ctx);
4739 allocator = fold_convert (pointer_sized_int_node, allocator);
4740 if (TREE_CODE (allocator) != INTEGER_CST)
4742 tree var = create_tmp_var (TREE_TYPE (allocator));
4743 gimplify_assign (var, allocator, ilist);
4744 allocator = var;
4747 tree ptr_type, align, sz = size;
4748 if (TYPE_P (new_var))
4750 ptr_type = build_pointer_type (new_var);
4751 ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
4753 else if (is_ref)
4755 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4756 ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4758 else
4760 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4761 ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
4762 if (sz == NULL_TREE)
4763 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
4765 align = build_int_cst (size_type_node, ialign);
4766 if (TREE_CODE (sz) != INTEGER_CST)
4768 tree szvar = create_tmp_var (size_type_node);
4769 gimplify_assign (szvar, sz, ilist);
4770 sz = szvar;
4772 allocate_ptr = create_tmp_var (ptr_type);
4773 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4774 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4775 gimple_call_set_lhs (g, allocate_ptr);
4776 gimple_seq_add_stmt (ilist, g);
4777 if (!is_ref)
4779 tree x = build_simple_mem_ref (allocate_ptr);
4780 TREE_THIS_NOTRAP (x) = 1;
4781 SET_DECL_VALUE_EXPR (new_var, x);
4782 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4784 return true;
4787 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4788 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4789 private variables. Initialization statements go in ILIST, while calls
4790 to destructors go in DLIST. */
4792 static void
4793 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4794 omp_context *ctx, struct omp_for_data *fd)
4796 tree c, copyin_seq, x, ptr;
4797 bool copyin_by_ref = false;
4798 bool lastprivate_firstprivate = false;
4799 bool reduction_omp_orig_ref = false;
4800 int pass;
4801 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4802 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4803 omplow_simd_context sctx = omplow_simd_context ();
4804 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4805 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4806 gimple_seq llist[4] = { };
4807 tree nonconst_simd_if = NULL_TREE;
4809 copyin_seq = NULL;
4810 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4812 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4813 with data sharing clauses referencing variable sized vars. That
4814 is unnecessarily hard to support and very unlikely to result in
4815 vectorized code anyway. */
4816 if (is_simd)
4817 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4818 switch (OMP_CLAUSE_CODE (c))
4820 case OMP_CLAUSE_LINEAR:
4821 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4822 sctx.max_vf = 1;
4823 /* FALLTHRU */
4824 case OMP_CLAUSE_PRIVATE:
4825 case OMP_CLAUSE_FIRSTPRIVATE:
4826 case OMP_CLAUSE_LASTPRIVATE:
4827 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4828 sctx.max_vf = 1;
4829 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4831 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4832 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4833 sctx.max_vf = 1;
4835 break;
4836 case OMP_CLAUSE_REDUCTION:
4837 case OMP_CLAUSE_IN_REDUCTION:
4838 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4839 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4840 sctx.max_vf = 1;
4841 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4843 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4844 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4845 sctx.max_vf = 1;
4847 break;
4848 case OMP_CLAUSE_IF:
4849 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4850 sctx.max_vf = 1;
4851 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4852 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4853 break;
4854 case OMP_CLAUSE_SIMDLEN:
4855 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4856 sctx.max_vf = 1;
4857 break;
4858 case OMP_CLAUSE__CONDTEMP_:
4859 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4860 if (sctx.is_simt)
4861 sctx.max_vf = 1;
4862 break;
4863 default:
4864 continue;
4867 /* Add a placeholder for simduid. */
4868 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4869 sctx.simt_eargs.safe_push (NULL_TREE);
4871 unsigned task_reduction_cnt = 0;
4872 unsigned task_reduction_cntorig = 0;
4873 unsigned task_reduction_cnt_full = 0;
4874 unsigned task_reduction_cntorig_full = 0;
4875 unsigned task_reduction_other_cnt = 0;
4876 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4877 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4878 /* Do all the fixed sized types in the first pass, and the variable sized
4879 types in the second pass. This makes sure that the scalar arguments to
4880 the variable sized types are processed before we use them in the
4881 variable sized operations. For task reductions we use 4 passes, in the
4882 first two we ignore them, in the third one gather arguments for
4883 GOMP_task_reduction_remap call and in the last pass actually handle
4884 the task reductions. */
4885 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4886 ? 4 : 2); ++pass)
4888 if (pass == 2 && task_reduction_cnt)
4890 tskred_atype
4891 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4892 + task_reduction_cntorig);
4893 tskred_avar = create_tmp_var_raw (tskred_atype);
4894 gimple_add_tmp_var (tskred_avar);
4895 TREE_ADDRESSABLE (tskred_avar) = 1;
4896 task_reduction_cnt_full = task_reduction_cnt;
4897 task_reduction_cntorig_full = task_reduction_cntorig;
4899 else if (pass == 3 && task_reduction_cnt)
4901 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4902 gimple *g
4903 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4904 size_int (task_reduction_cntorig),
4905 build_fold_addr_expr (tskred_avar));
4906 gimple_seq_add_stmt (ilist, g);
4908 if (pass == 3 && task_reduction_other_cnt)
4910 /* For reduction clauses, build
4911 tskred_base = (void *) tskred_temp[2]
4912 + omp_get_thread_num () * tskred_temp[1]
4913 or if tskred_temp[1] is known to be constant, that constant
4914 directly. This is the start of the private reduction copy block
4915 for the current thread. */
4916 tree v = create_tmp_var (integer_type_node);
4917 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4918 gimple *g = gimple_build_call (x, 0);
4919 gimple_call_set_lhs (g, v);
4920 gimple_seq_add_stmt (ilist, g);
4921 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4922 tskred_temp = OMP_CLAUSE_DECL (c);
4923 if (is_taskreg_ctx (ctx))
4924 tskred_temp = lookup_decl (tskred_temp, ctx);
4925 tree v2 = create_tmp_var (sizetype);
4926 g = gimple_build_assign (v2, NOP_EXPR, v);
4927 gimple_seq_add_stmt (ilist, g);
4928 if (ctx->task_reductions[0])
4929 v = fold_convert (sizetype, ctx->task_reductions[0]);
4930 else
4931 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4932 tree v3 = create_tmp_var (sizetype);
4933 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4934 gimple_seq_add_stmt (ilist, g);
4935 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4936 tskred_base = create_tmp_var (ptr_type_node);
4937 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4938 gimple_seq_add_stmt (ilist, g);
4940 task_reduction_cnt = 0;
4941 task_reduction_cntorig = 0;
4942 task_reduction_other_cnt = 0;
4943 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4945 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4946 tree var, new_var;
4947 bool by_ref;
4948 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4949 bool task_reduction_p = false;
4950 bool task_reduction_needs_orig_p = false;
4951 tree cond = NULL_TREE;
4952 tree allocator, allocate_ptr;
4954 switch (c_kind)
4956 case OMP_CLAUSE_PRIVATE:
4957 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4958 continue;
4959 break;
4960 case OMP_CLAUSE_SHARED:
4961 /* Ignore shared directives in teams construct inside
4962 of target construct. */
4963 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4964 && !is_host_teams_ctx (ctx))
4965 continue;
4966 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4968 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4969 || is_global_var (OMP_CLAUSE_DECL (c)));
4970 continue;
4972 case OMP_CLAUSE_FIRSTPRIVATE:
4973 case OMP_CLAUSE_COPYIN:
4974 break;
4975 case OMP_CLAUSE_LINEAR:
4976 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4977 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4978 lastprivate_firstprivate = true;
4979 break;
4980 case OMP_CLAUSE_REDUCTION:
4981 case OMP_CLAUSE_IN_REDUCTION:
4982 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
4983 || is_task_ctx (ctx)
4984 || OMP_CLAUSE_REDUCTION_TASK (c))
4986 task_reduction_p = true;
4987 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4989 task_reduction_other_cnt++;
4990 if (pass == 2)
4991 continue;
4993 else
4994 task_reduction_cnt++;
4995 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4997 var = OMP_CLAUSE_DECL (c);
4998 /* If var is a global variable that isn't privatized
4999 in outer contexts, we don't need to look up the
5000 original address, it is always the address of the
5001 global variable itself. */
5002 if (!DECL_P (var)
5003 || omp_privatize_by_reference (var)
5004 || !is_global_var
5005 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5007 task_reduction_needs_orig_p = true;
5008 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5009 task_reduction_cntorig++;
5013 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5014 reduction_omp_orig_ref = true;
5015 break;
5016 case OMP_CLAUSE__REDUCTEMP_:
5017 if (!is_taskreg_ctx (ctx))
5018 continue;
5019 /* FALLTHRU */
5020 case OMP_CLAUSE__LOOPTEMP_:
5021 /* Handle _looptemp_/_reductemp_ clauses only on
5022 parallel/task. */
5023 if (fd)
5024 continue;
5025 break;
5026 case OMP_CLAUSE_LASTPRIVATE:
5027 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5029 lastprivate_firstprivate = true;
5030 if (pass != 0 || is_taskloop_ctx (ctx))
5031 continue;
5033 /* Even without corresponding firstprivate, if
5034 decl is Fortran allocatable, it needs outer var
5035 reference. */
5036 else if (pass == 0
5037 && lang_hooks.decls.omp_private_outer_ref
5038 (OMP_CLAUSE_DECL (c)))
5039 lastprivate_firstprivate = true;
5040 break;
5041 case OMP_CLAUSE_ALIGNED:
5042 if (pass != 1)
5043 continue;
5044 var = OMP_CLAUSE_DECL (c);
5045 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5046 && !is_global_var (var))
5048 new_var = maybe_lookup_decl (var, ctx);
5049 if (new_var == NULL_TREE)
5050 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
5051 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5052 tree alarg = omp_clause_aligned_alignment (c);
5053 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5054 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5055 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5056 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5057 gimplify_and_add (x, ilist);
5059 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5060 && is_global_var (var))
5062 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5063 new_var = lookup_decl (var, ctx);
5064 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
5065 t = build_fold_addr_expr_loc (clause_loc, t);
5066 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5067 tree alarg = omp_clause_aligned_alignment (c);
5068 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5069 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5070 t = fold_convert_loc (clause_loc, ptype, t);
5071 x = create_tmp_var (ptype);
5072 t = build2 (MODIFY_EXPR, ptype, x, t);
5073 gimplify_and_add (t, ilist);
5074 t = build_simple_mem_ref_loc (clause_loc, x);
5075 SET_DECL_VALUE_EXPR (new_var, t);
5076 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5078 continue;
5079 case OMP_CLAUSE__CONDTEMP_:
5080 if (is_parallel_ctx (ctx)
5081 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5082 break;
5083 continue;
5084 default:
5085 continue;
5088 if (task_reduction_p != (pass >= 2))
5089 continue;
5091 allocator = NULL_TREE;
5092 allocate_ptr = NULL_TREE;
5093 new_var = var = OMP_CLAUSE_DECL (c);
5094 if ((c_kind == OMP_CLAUSE_REDUCTION
5095 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5096 && TREE_CODE (var) == MEM_REF)
5098 var = TREE_OPERAND (var, 0);
5099 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5100 var = TREE_OPERAND (var, 0);
5101 if (TREE_CODE (var) == INDIRECT_REF
5102 || TREE_CODE (var) == ADDR_EXPR)
5103 var = TREE_OPERAND (var, 0);
5104 if (is_variable_sized (var))
5106 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5107 var = DECL_VALUE_EXPR (var);
5108 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5109 var = TREE_OPERAND (var, 0);
5110 gcc_assert (DECL_P (var));
5112 new_var = var;
5114 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5116 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5117 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5119 else if (c_kind != OMP_CLAUSE_COPYIN)
5120 new_var = lookup_decl (var, ctx);
5122 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5124 if (pass != 0)
5125 continue;
5127 /* C/C++ array section reductions. */
5128 else if ((c_kind == OMP_CLAUSE_REDUCTION
5129 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5130 && var != OMP_CLAUSE_DECL (c))
5132 if (pass == 0)
5133 continue;
5135 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5136 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5138 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5140 tree b = TREE_OPERAND (orig_var, 1);
5141 if (is_omp_target (ctx->stmt))
5142 b = NULL_TREE;
5143 else
5144 b = maybe_lookup_decl (b, ctx);
5145 if (b == NULL)
5147 b = TREE_OPERAND (orig_var, 1);
5148 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5150 if (integer_zerop (bias))
5151 bias = b;
5152 else
5154 bias = fold_convert_loc (clause_loc,
5155 TREE_TYPE (b), bias);
5156 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5157 TREE_TYPE (b), b, bias);
5159 orig_var = TREE_OPERAND (orig_var, 0);
5161 if (pass == 2)
5163 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5164 if (is_global_var (out)
5165 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5166 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5167 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5168 != POINTER_TYPE)))
5169 x = var;
5170 else if (is_omp_target (ctx->stmt))
5171 x = out;
5172 else
5174 bool by_ref = use_pointer_for_field (var, NULL);
5175 x = build_receiver_ref (var, by_ref, ctx);
5176 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5177 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5178 == POINTER_TYPE))
5179 x = build_fold_addr_expr (x);
5181 if (TREE_CODE (orig_var) == INDIRECT_REF)
5182 x = build_simple_mem_ref (x);
5183 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5185 if (var == TREE_OPERAND (orig_var, 0))
5186 x = build_fold_addr_expr (x);
5188 bias = fold_convert (sizetype, bias);
5189 x = fold_convert (ptr_type_node, x);
5190 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5191 TREE_TYPE (x), x, bias);
5192 unsigned cnt = task_reduction_cnt - 1;
5193 if (!task_reduction_needs_orig_p)
5194 cnt += (task_reduction_cntorig_full
5195 - task_reduction_cntorig);
5196 else
5197 cnt = task_reduction_cntorig - 1;
5198 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5199 size_int (cnt), NULL_TREE, NULL_TREE);
5200 gimplify_assign (r, x, ilist);
5201 continue;
5204 if (TREE_CODE (orig_var) == INDIRECT_REF
5205 || TREE_CODE (orig_var) == ADDR_EXPR)
5206 orig_var = TREE_OPERAND (orig_var, 0);
5207 tree d = OMP_CLAUSE_DECL (c);
5208 tree type = TREE_TYPE (d);
5209 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5210 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5211 tree sz = v;
5212 const char *name = get_name (orig_var);
5213 if (pass != 3 && !TREE_CONSTANT (v))
5215 tree t;
5216 if (is_omp_target (ctx->stmt))
5217 t = NULL_TREE;
5218 else
5219 t = maybe_lookup_decl (v, ctx);
5220 if (t)
5221 v = t;
5222 else
5223 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5224 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5225 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5226 TREE_TYPE (v), v,
5227 build_int_cst (TREE_TYPE (v), 1));
5228 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5229 TREE_TYPE (v), t,
5230 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5232 if (pass == 3)
5234 tree xv = create_tmp_var (ptr_type_node);
5235 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5237 unsigned cnt = task_reduction_cnt - 1;
5238 if (!task_reduction_needs_orig_p)
5239 cnt += (task_reduction_cntorig_full
5240 - task_reduction_cntorig);
5241 else
5242 cnt = task_reduction_cntorig - 1;
5243 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5244 size_int (cnt), NULL_TREE, NULL_TREE);
5246 gimple *g = gimple_build_assign (xv, x);
5247 gimple_seq_add_stmt (ilist, g);
5249 else
5251 unsigned int idx = *ctx->task_reduction_map->get (c);
5252 tree off;
5253 if (ctx->task_reductions[1 + idx])
5254 off = fold_convert (sizetype,
5255 ctx->task_reductions[1 + idx]);
5256 else
5257 off = task_reduction_read (ilist, tskred_temp, sizetype,
5258 7 + 3 * idx + 1);
5259 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5260 tskred_base, off);
5261 gimple_seq_add_stmt (ilist, g);
5263 x = fold_convert (build_pointer_type (boolean_type_node),
5264 xv);
5265 if (TREE_CONSTANT (v))
5266 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5267 TYPE_SIZE_UNIT (type));
5268 else
5270 tree t;
5271 if (is_omp_target (ctx->stmt))
5272 t = NULL_TREE;
5273 else
5274 t = maybe_lookup_decl (v, ctx);
5275 if (t)
5276 v = t;
5277 else
5278 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5279 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5280 fb_rvalue);
5281 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5282 TREE_TYPE (v), v,
5283 build_int_cst (TREE_TYPE (v), 1));
5284 t = fold_build2_loc (clause_loc, MULT_EXPR,
5285 TREE_TYPE (v), t,
5286 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5287 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5289 cond = create_tmp_var (TREE_TYPE (x));
5290 gimplify_assign (cond, x, ilist);
5291 x = xv;
5293 else if (lower_private_allocate (var, type, allocator,
5294 allocate_ptr, ilist, ctx,
5295 true,
5296 TREE_CONSTANT (v)
5297 ? TYPE_SIZE_UNIT (type)
5298 : sz))
5299 x = allocate_ptr;
5300 else if (TREE_CONSTANT (v))
5302 x = create_tmp_var_raw (type, name);
5303 gimple_add_tmp_var (x);
5304 TREE_ADDRESSABLE (x) = 1;
5305 x = build_fold_addr_expr_loc (clause_loc, x);
5307 else
5309 tree atmp
5310 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5311 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5312 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5315 tree ptype = build_pointer_type (TREE_TYPE (type));
5316 x = fold_convert_loc (clause_loc, ptype, x);
5317 tree y = create_tmp_var (ptype, name);
5318 gimplify_assign (y, x, ilist);
5319 x = y;
5320 tree yb = y;
5322 if (!integer_zerop (bias))
5324 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5325 bias);
5326 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5328 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5329 pointer_sized_int_node, yb, bias);
5330 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5331 yb = create_tmp_var (ptype, name);
5332 gimplify_assign (yb, x, ilist);
5333 x = yb;
5336 d = TREE_OPERAND (d, 0);
5337 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5338 d = TREE_OPERAND (d, 0);
5339 if (TREE_CODE (d) == ADDR_EXPR)
5341 if (orig_var != var)
5343 gcc_assert (is_variable_sized (orig_var));
5344 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5346 gimplify_assign (new_var, x, ilist);
5347 tree new_orig_var = lookup_decl (orig_var, ctx);
5348 tree t = build_fold_indirect_ref (new_var);
5349 DECL_IGNORED_P (new_var) = 0;
5350 TREE_THIS_NOTRAP (t) = 1;
5351 SET_DECL_VALUE_EXPR (new_orig_var, t);
5352 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5354 else
5356 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5357 build_int_cst (ptype, 0));
5358 SET_DECL_VALUE_EXPR (new_var, x);
5359 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5362 else
5364 gcc_assert (orig_var == var);
5365 if (TREE_CODE (d) == INDIRECT_REF)
5367 x = create_tmp_var (ptype, name);
5368 TREE_ADDRESSABLE (x) = 1;
5369 gimplify_assign (x, yb, ilist);
5370 x = build_fold_addr_expr_loc (clause_loc, x);
5372 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5373 gimplify_assign (new_var, x, ilist);
5375 /* GOMP_taskgroup_reduction_register memsets the whole
5376 array to zero. If the initializer is zero, we don't
5377 need to initialize it again, just mark it as ever
5378 used unconditionally, i.e. cond = true. */
5379 if (cond
5380 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5381 && initializer_zerop (omp_reduction_init (c,
5382 TREE_TYPE (type))))
5384 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5385 boolean_true_node);
5386 gimple_seq_add_stmt (ilist, g);
5387 continue;
5389 tree end = create_artificial_label (UNKNOWN_LOCATION);
5390 if (cond)
5392 gimple *g;
5393 if (!is_parallel_ctx (ctx))
5395 tree condv = create_tmp_var (boolean_type_node);
5396 g = gimple_build_assign (condv,
5397 build_simple_mem_ref (cond));
5398 gimple_seq_add_stmt (ilist, g);
5399 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5400 g = gimple_build_cond (NE_EXPR, condv,
5401 boolean_false_node, end, lab1);
5402 gimple_seq_add_stmt (ilist, g);
5403 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5405 g = gimple_build_assign (build_simple_mem_ref (cond),
5406 boolean_true_node);
5407 gimple_seq_add_stmt (ilist, g);
5410 tree y1 = create_tmp_var (ptype);
5411 gimplify_assign (y1, y, ilist);
5412 tree i2 = NULL_TREE, y2 = NULL_TREE;
5413 tree body2 = NULL_TREE, end2 = NULL_TREE;
5414 tree y3 = NULL_TREE, y4 = NULL_TREE;
5415 if (task_reduction_needs_orig_p)
5417 y3 = create_tmp_var (ptype);
5418 tree ref;
5419 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5420 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5421 size_int (task_reduction_cnt_full
5422 + task_reduction_cntorig - 1),
5423 NULL_TREE, NULL_TREE);
5424 else
5426 unsigned int idx = *ctx->task_reduction_map->get (c);
5427 ref = task_reduction_read (ilist, tskred_temp, ptype,
5428 7 + 3 * idx);
5430 gimplify_assign (y3, ref, ilist);
5432 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5434 if (pass != 3)
5436 y2 = create_tmp_var (ptype);
5437 gimplify_assign (y2, y, ilist);
5439 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5441 tree ref = build_outer_var_ref (var, ctx);
5442 /* For ref build_outer_var_ref already performs this. */
5443 if (TREE_CODE (d) == INDIRECT_REF)
5444 gcc_assert (omp_privatize_by_reference (var));
5445 else if (TREE_CODE (d) == ADDR_EXPR)
5446 ref = build_fold_addr_expr (ref);
5447 else if (omp_privatize_by_reference (var))
5448 ref = build_fold_addr_expr (ref);
5449 ref = fold_convert_loc (clause_loc, ptype, ref);
5450 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5451 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5453 y3 = create_tmp_var (ptype);
5454 gimplify_assign (y3, unshare_expr (ref), ilist);
5456 if (is_simd)
5458 y4 = create_tmp_var (ptype);
5459 gimplify_assign (y4, ref, dlist);
5463 tree i = create_tmp_var (TREE_TYPE (v));
5464 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5465 tree body = create_artificial_label (UNKNOWN_LOCATION);
5466 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5467 if (y2)
5469 i2 = create_tmp_var (TREE_TYPE (v));
5470 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5471 body2 = create_artificial_label (UNKNOWN_LOCATION);
5472 end2 = create_artificial_label (UNKNOWN_LOCATION);
5473 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5475 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5477 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5478 tree decl_placeholder
5479 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5480 SET_DECL_VALUE_EXPR (decl_placeholder,
5481 build_simple_mem_ref (y1));
5482 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5483 SET_DECL_VALUE_EXPR (placeholder,
5484 y3 ? build_simple_mem_ref (y3)
5485 : error_mark_node);
5486 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5487 x = lang_hooks.decls.omp_clause_default_ctor
5488 (c, build_simple_mem_ref (y1),
5489 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5490 if (x)
5491 gimplify_and_add (x, ilist);
5492 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5494 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5495 lower_omp (&tseq, ctx);
5496 gimple_seq_add_seq (ilist, tseq);
5498 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5499 if (is_simd)
5501 SET_DECL_VALUE_EXPR (decl_placeholder,
5502 build_simple_mem_ref (y2));
5503 SET_DECL_VALUE_EXPR (placeholder,
5504 build_simple_mem_ref (y4));
5505 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5506 lower_omp (&tseq, ctx);
5507 gimple_seq_add_seq (dlist, tseq);
5508 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5510 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5511 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5512 if (y2)
5514 x = lang_hooks.decls.omp_clause_dtor
5515 (c, build_simple_mem_ref (y2));
5516 if (x)
5517 gimplify_and_add (x, dlist);
5520 else
5522 x = omp_reduction_init (c, TREE_TYPE (type));
5523 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5525 /* reduction(-:var) sums up the partial results, so it
5526 acts identically to reduction(+:var). */
5527 if (code == MINUS_EXPR)
5528 code = PLUS_EXPR;
5530 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5531 if (is_simd)
5533 x = build2 (code, TREE_TYPE (type),
5534 build_simple_mem_ref (y4),
5535 build_simple_mem_ref (y2));
5536 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5539 gimple *g
5540 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5541 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5542 gimple_seq_add_stmt (ilist, g);
5543 if (y3)
5545 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5546 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5547 gimple_seq_add_stmt (ilist, g);
5549 g = gimple_build_assign (i, PLUS_EXPR, i,
5550 build_int_cst (TREE_TYPE (i), 1));
5551 gimple_seq_add_stmt (ilist, g);
5552 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5553 gimple_seq_add_stmt (ilist, g);
5554 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5555 if (y2)
5557 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5558 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5559 gimple_seq_add_stmt (dlist, g);
5560 if (y4)
5562 g = gimple_build_assign
5563 (y4, POINTER_PLUS_EXPR, y4,
5564 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5565 gimple_seq_add_stmt (dlist, g);
5567 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5568 build_int_cst (TREE_TYPE (i2), 1));
5569 gimple_seq_add_stmt (dlist, g);
5570 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5571 gimple_seq_add_stmt (dlist, g);
5572 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5574 if (allocator)
5576 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5577 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5578 gimple_seq_add_stmt (dlist, g);
5580 continue;
5582 else if (pass == 2)
5584 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5585 if (is_global_var (out))
5586 x = var;
5587 else if (is_omp_target (ctx->stmt))
5588 x = out;
5589 else
5591 bool by_ref = use_pointer_for_field (var, ctx);
5592 x = build_receiver_ref (var, by_ref, ctx);
5594 if (!omp_privatize_by_reference (var))
5595 x = build_fold_addr_expr (x);
5596 x = fold_convert (ptr_type_node, x);
5597 unsigned cnt = task_reduction_cnt - 1;
5598 if (!task_reduction_needs_orig_p)
5599 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5600 else
5601 cnt = task_reduction_cntorig - 1;
5602 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5603 size_int (cnt), NULL_TREE, NULL_TREE);
5604 gimplify_assign (r, x, ilist);
5605 continue;
5607 else if (pass == 3)
5609 tree type = TREE_TYPE (new_var);
5610 if (!omp_privatize_by_reference (var))
5611 type = build_pointer_type (type);
5612 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5614 unsigned cnt = task_reduction_cnt - 1;
5615 if (!task_reduction_needs_orig_p)
5616 cnt += (task_reduction_cntorig_full
5617 - task_reduction_cntorig);
5618 else
5619 cnt = task_reduction_cntorig - 1;
5620 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5621 size_int (cnt), NULL_TREE, NULL_TREE);
5623 else
5625 unsigned int idx = *ctx->task_reduction_map->get (c);
5626 tree off;
5627 if (ctx->task_reductions[1 + idx])
5628 off = fold_convert (sizetype,
5629 ctx->task_reductions[1 + idx]);
5630 else
5631 off = task_reduction_read (ilist, tskred_temp, sizetype,
5632 7 + 3 * idx + 1);
5633 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5634 tskred_base, off);
5636 x = fold_convert (type, x);
5637 tree t;
5638 if (omp_privatize_by_reference (var))
5640 gimplify_assign (new_var, x, ilist);
5641 t = new_var;
5642 new_var = build_simple_mem_ref (new_var);
5644 else
5646 t = create_tmp_var (type);
5647 gimplify_assign (t, x, ilist);
5648 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5649 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5651 t = fold_convert (build_pointer_type (boolean_type_node), t);
5652 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5653 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5654 cond = create_tmp_var (TREE_TYPE (t));
5655 gimplify_assign (cond, t, ilist);
5657 else if (is_variable_sized (var))
5659 /* For variable sized types, we need to allocate the
5660 actual storage here. Call alloca and store the
5661 result in the pointer decl that we created elsewhere. */
5662 if (pass == 0)
5663 continue;
5665 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5667 tree tmp;
5669 ptr = DECL_VALUE_EXPR (new_var);
5670 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5671 ptr = TREE_OPERAND (ptr, 0);
5672 gcc_assert (DECL_P (ptr));
5673 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5675 if (lower_private_allocate (var, new_var, allocator,
5676 allocate_ptr, ilist, ctx,
5677 false, x))
5678 tmp = allocate_ptr;
5679 else
5681 /* void *tmp = __builtin_alloca */
5682 tree atmp
5683 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5684 gcall *stmt
5685 = gimple_build_call (atmp, 2, x,
5686 size_int (DECL_ALIGN (var)));
5687 cfun->calls_alloca = 1;
5688 tmp = create_tmp_var_raw (ptr_type_node);
5689 gimple_add_tmp_var (tmp);
5690 gimple_call_set_lhs (stmt, tmp);
5692 gimple_seq_add_stmt (ilist, stmt);
5695 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5696 gimplify_assign (ptr, x, ilist);
5699 else if (omp_privatize_by_reference (var)
5700 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5701 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5703 /* For references that are being privatized for Fortran,
5704 allocate new backing storage for the new pointer
5705 variable. This allows us to avoid changing all the
5706 code that expects a pointer to something that expects
5707 a direct variable. */
5708 if (pass == 0)
5709 continue;
5711 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5712 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5714 x = build_receiver_ref (var, false, ctx);
5715 if (ctx->allocate_map)
5716 if (tree *allocatep = ctx->allocate_map->get (var))
5718 allocator = *allocatep;
5719 if (TREE_CODE (allocator) == TREE_LIST)
5720 allocator = TREE_PURPOSE (allocator);
5721 if (TREE_CODE (allocator) != INTEGER_CST)
5722 allocator = build_outer_var_ref (allocator, ctx);
5723 allocator = fold_convert (pointer_sized_int_node,
5724 allocator);
5725 allocate_ptr = unshare_expr (x);
5727 if (allocator == NULL_TREE)
5728 x = build_fold_addr_expr_loc (clause_loc, x);
5730 else if (lower_private_allocate (var, new_var, allocator,
5731 allocate_ptr,
5732 ilist, ctx, true, x))
5733 x = allocate_ptr;
5734 else if (TREE_CONSTANT (x))
5736 /* For reduction in SIMD loop, defer adding the
5737 initialization of the reference, because if we decide
5738 to use SIMD array for it, the initilization could cause
5739 expansion ICE. Ditto for other privatization clauses. */
5740 if (is_simd)
5741 x = NULL_TREE;
5742 else
5744 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5745 get_name (var));
5746 gimple_add_tmp_var (x);
5747 TREE_ADDRESSABLE (x) = 1;
5748 x = build_fold_addr_expr_loc (clause_loc, x);
5751 else
5753 tree atmp
5754 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5755 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5756 tree al = size_int (TYPE_ALIGN (rtype));
5757 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5760 if (x)
5762 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5763 gimplify_assign (new_var, x, ilist);
5766 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5768 else if ((c_kind == OMP_CLAUSE_REDUCTION
5769 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5770 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5772 if (pass == 0)
5773 continue;
5775 else if (pass != 0)
5776 continue;
5778 switch (OMP_CLAUSE_CODE (c))
5780 case OMP_CLAUSE_SHARED:
5781 /* Ignore shared directives in teams construct inside
5782 target construct. */
5783 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5784 && !is_host_teams_ctx (ctx))
5785 continue;
5786 /* Shared global vars are just accessed directly. */
5787 if (is_global_var (new_var))
5788 break;
5789 /* For taskloop firstprivate/lastprivate, represented
5790 as firstprivate and shared clause on the task, new_var
5791 is the firstprivate var. */
5792 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5793 break;
5794 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5795 needs to be delayed until after fixup_child_record_type so
5796 that we get the correct type during the dereference. */
5797 by_ref = use_pointer_for_field (var, ctx);
5798 x = build_receiver_ref (var, by_ref, ctx);
5799 SET_DECL_VALUE_EXPR (new_var, x);
5800 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5802 /* ??? If VAR is not passed by reference, and the variable
5803 hasn't been initialized yet, then we'll get a warning for
5804 the store into the omp_data_s structure. Ideally, we'd be
5805 able to notice this and not store anything at all, but
5806 we're generating code too early. Suppress the warning. */
5807 if (!by_ref)
5808 suppress_warning (var, OPT_Wuninitialized);
5809 break;
5811 case OMP_CLAUSE__CONDTEMP_:
5812 if (is_parallel_ctx (ctx))
5814 x = build_receiver_ref (var, false, ctx);
5815 SET_DECL_VALUE_EXPR (new_var, x);
5816 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5818 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5820 x = build_zero_cst (TREE_TYPE (var));
5821 goto do_private;
5823 break;
5825 case OMP_CLAUSE_LASTPRIVATE:
5826 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5827 break;
5828 /* FALLTHRU */
5830 case OMP_CLAUSE_PRIVATE:
5831 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5832 x = build_outer_var_ref (var, ctx);
5833 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5835 if (is_task_ctx (ctx))
5836 x = build_receiver_ref (var, false, ctx);
5837 else
5838 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5840 else
5841 x = NULL;
5842 do_private:
5843 tree nx;
5844 bool copy_ctor;
5845 copy_ctor = false;
5846 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5847 ilist, ctx, false, NULL_TREE);
5848 nx = unshare_expr (new_var);
5849 if (is_simd
5850 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5851 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5852 copy_ctor = true;
5853 if (copy_ctor)
5854 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5855 else
5856 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5857 if (is_simd)
5859 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5860 if ((TREE_ADDRESSABLE (new_var) || nx || y
5861 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5862 && (gimple_omp_for_collapse (ctx->stmt) != 1
5863 || (gimple_omp_for_index (ctx->stmt, 0)
5864 != new_var)))
5865 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5866 || omp_privatize_by_reference (var))
5867 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5868 ivar, lvar))
5870 if (omp_privatize_by_reference (var))
5872 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5873 tree new_vard = TREE_OPERAND (new_var, 0);
5874 gcc_assert (DECL_P (new_vard));
5875 SET_DECL_VALUE_EXPR (new_vard,
5876 build_fold_addr_expr (lvar));
5877 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5880 if (nx)
5882 tree iv = unshare_expr (ivar);
5883 if (copy_ctor)
5884 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5886 else
5887 x = lang_hooks.decls.omp_clause_default_ctor (c,
5891 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5893 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5894 unshare_expr (ivar), x);
5895 nx = x;
5897 if (nx && x)
5898 gimplify_and_add (x, &llist[0]);
5899 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5900 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5902 tree v = new_var;
5903 if (!DECL_P (v))
5905 gcc_assert (TREE_CODE (v) == MEM_REF);
5906 v = TREE_OPERAND (v, 0);
5907 gcc_assert (DECL_P (v));
5909 v = *ctx->lastprivate_conditional_map->get (v);
5910 tree t = create_tmp_var (TREE_TYPE (v));
5911 tree z = build_zero_cst (TREE_TYPE (v));
5912 tree orig_v
5913 = build_outer_var_ref (var, ctx,
5914 OMP_CLAUSE_LASTPRIVATE);
5915 gimple_seq_add_stmt (dlist,
5916 gimple_build_assign (t, z));
5917 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5918 tree civar = DECL_VALUE_EXPR (v);
5919 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5920 civar = unshare_expr (civar);
5921 TREE_OPERAND (civar, 1) = sctx.idx;
5922 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5923 unshare_expr (civar));
5924 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5925 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5926 orig_v, unshare_expr (ivar)));
5927 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5928 civar);
5929 x = build3 (COND_EXPR, void_type_node, cond, x,
5930 void_node);
5931 gimple_seq tseq = NULL;
5932 gimplify_and_add (x, &tseq);
5933 if (ctx->outer)
5934 lower_omp (&tseq, ctx->outer);
5935 gimple_seq_add_seq (&llist[1], tseq);
5937 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5938 && ctx->for_simd_scan_phase)
5940 x = unshare_expr (ivar);
5941 tree orig_v
5942 = build_outer_var_ref (var, ctx,
5943 OMP_CLAUSE_LASTPRIVATE);
5944 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5945 orig_v);
5946 gimplify_and_add (x, &llist[0]);
5948 if (y)
5950 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5951 if (y)
5952 gimplify_and_add (y, &llist[1]);
5954 break;
5956 if (omp_privatize_by_reference (var))
5958 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5959 tree new_vard = TREE_OPERAND (new_var, 0);
5960 gcc_assert (DECL_P (new_vard));
5961 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5962 x = TYPE_SIZE_UNIT (type);
5963 if (TREE_CONSTANT (x))
5965 x = create_tmp_var_raw (type, get_name (var));
5966 gimple_add_tmp_var (x);
5967 TREE_ADDRESSABLE (x) = 1;
5968 x = build_fold_addr_expr_loc (clause_loc, x);
5969 x = fold_convert_loc (clause_loc,
5970 TREE_TYPE (new_vard), x);
5971 gimplify_assign (new_vard, x, ilist);
5975 if (nx)
5976 gimplify_and_add (nx, ilist);
5977 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5978 && is_simd
5979 && ctx->for_simd_scan_phase)
5981 tree orig_v = build_outer_var_ref (var, ctx,
5982 OMP_CLAUSE_LASTPRIVATE);
5983 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5984 orig_v);
5985 gimplify_and_add (x, ilist);
5987 /* FALLTHRU */
5989 do_dtor:
5990 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5991 if (x)
5992 gimplify_and_add (x, dlist);
5993 if (allocator)
5995 if (!is_gimple_val (allocator))
5997 tree avar = create_tmp_var (TREE_TYPE (allocator));
5998 gimplify_assign (avar, allocator, dlist);
5999 allocator = avar;
6001 if (!is_gimple_val (allocate_ptr))
6003 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
6004 gimplify_assign (apvar, allocate_ptr, dlist);
6005 allocate_ptr = apvar;
6007 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
6008 gimple *g
6009 = gimple_build_call (f, 2, allocate_ptr, allocator);
6010 gimple_seq_add_stmt (dlist, g);
6012 break;
6014 case OMP_CLAUSE_LINEAR:
6015 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6016 goto do_firstprivate;
6017 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6018 x = NULL;
6019 else
6020 x = build_outer_var_ref (var, ctx);
6021 goto do_private;
6023 case OMP_CLAUSE_FIRSTPRIVATE:
6024 if (is_task_ctx (ctx))
6026 if ((omp_privatize_by_reference (var)
6027 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
6028 || is_variable_sized (var))
6029 goto do_dtor;
6030 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
6031 ctx))
6032 || use_pointer_for_field (var, NULL))
6034 x = build_receiver_ref (var, false, ctx);
6035 if (ctx->allocate_map)
6036 if (tree *allocatep = ctx->allocate_map->get (var))
6038 allocator = *allocatep;
6039 if (TREE_CODE (allocator) == TREE_LIST)
6040 allocator = TREE_PURPOSE (allocator);
6041 if (TREE_CODE (allocator) != INTEGER_CST)
6042 allocator = build_outer_var_ref (allocator, ctx);
6043 allocator = fold_convert (pointer_sized_int_node,
6044 allocator);
6045 allocate_ptr = unshare_expr (x);
6046 x = build_simple_mem_ref (x);
6047 TREE_THIS_NOTRAP (x) = 1;
6049 SET_DECL_VALUE_EXPR (new_var, x);
6050 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6051 goto do_dtor;
6054 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6055 && omp_privatize_by_reference (var))
6057 x = build_outer_var_ref (var, ctx);
6058 gcc_assert (TREE_CODE (x) == MEM_REF
6059 && integer_zerop (TREE_OPERAND (x, 1)));
6060 x = TREE_OPERAND (x, 0);
6061 x = lang_hooks.decls.omp_clause_copy_ctor
6062 (c, unshare_expr (new_var), x);
6063 gimplify_and_add (x, ilist);
6064 goto do_dtor;
6066 do_firstprivate:
6067 lower_private_allocate (var, new_var, allocator, allocate_ptr,
6068 ilist, ctx, false, NULL_TREE);
6069 x = build_outer_var_ref (var, ctx);
6070 if (is_simd)
6072 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6073 && gimple_omp_for_combined_into_p (ctx->stmt))
6075 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6076 tree stept = TREE_TYPE (t);
6077 tree ct = omp_find_clause (clauses,
6078 OMP_CLAUSE__LOOPTEMP_);
6079 gcc_assert (ct);
6080 tree l = OMP_CLAUSE_DECL (ct);
6081 tree n1 = fd->loop.n1;
6082 tree step = fd->loop.step;
6083 tree itype = TREE_TYPE (l);
6084 if (POINTER_TYPE_P (itype))
6085 itype = signed_type_for (itype);
6086 l = fold_build2 (MINUS_EXPR, itype, l, n1);
6087 if (TYPE_UNSIGNED (itype)
6088 && fd->loop.cond_code == GT_EXPR)
6089 l = fold_build2 (TRUNC_DIV_EXPR, itype,
6090 fold_build1 (NEGATE_EXPR, itype, l),
6091 fold_build1 (NEGATE_EXPR,
6092 itype, step));
6093 else
6094 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6095 t = fold_build2 (MULT_EXPR, stept,
6096 fold_convert (stept, l), t);
6098 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6100 if (omp_privatize_by_reference (var))
6102 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6103 tree new_vard = TREE_OPERAND (new_var, 0);
6104 gcc_assert (DECL_P (new_vard));
6105 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6106 nx = TYPE_SIZE_UNIT (type);
6107 if (TREE_CONSTANT (nx))
6109 nx = create_tmp_var_raw (type,
6110 get_name (var));
6111 gimple_add_tmp_var (nx);
6112 TREE_ADDRESSABLE (nx) = 1;
6113 nx = build_fold_addr_expr_loc (clause_loc,
6114 nx);
6115 nx = fold_convert_loc (clause_loc,
6116 TREE_TYPE (new_vard),
6117 nx);
6118 gimplify_assign (new_vard, nx, ilist);
6122 x = lang_hooks.decls.omp_clause_linear_ctor
6123 (c, new_var, x, t);
6124 gimplify_and_add (x, ilist);
6125 goto do_dtor;
6128 if (POINTER_TYPE_P (TREE_TYPE (x)))
6129 x = fold_build2 (POINTER_PLUS_EXPR,
6130 TREE_TYPE (x), x, t);
6131 else
6132 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
6135 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6136 || TREE_ADDRESSABLE (new_var)
6137 || omp_privatize_by_reference (var))
6138 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6139 ivar, lvar))
6141 if (omp_privatize_by_reference (var))
6143 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6144 tree new_vard = TREE_OPERAND (new_var, 0);
6145 gcc_assert (DECL_P (new_vard));
6146 SET_DECL_VALUE_EXPR (new_vard,
6147 build_fold_addr_expr (lvar));
6148 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6150 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6152 tree iv = create_tmp_var (TREE_TYPE (new_var));
6153 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6154 gimplify_and_add (x, ilist);
6155 gimple_stmt_iterator gsi
6156 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6157 gassign *g
6158 = gimple_build_assign (unshare_expr (lvar), iv);
6159 gsi_insert_before_without_update (&gsi, g,
6160 GSI_SAME_STMT);
6161 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6162 enum tree_code code = PLUS_EXPR;
6163 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6164 code = POINTER_PLUS_EXPR;
6165 g = gimple_build_assign (iv, code, iv, t);
6166 gsi_insert_before_without_update (&gsi, g,
6167 GSI_SAME_STMT);
6168 break;
6170 x = lang_hooks.decls.omp_clause_copy_ctor
6171 (c, unshare_expr (ivar), x);
6172 gimplify_and_add (x, &llist[0]);
6173 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6174 if (x)
6175 gimplify_and_add (x, &llist[1]);
6176 break;
6178 if (omp_privatize_by_reference (var))
6180 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6181 tree new_vard = TREE_OPERAND (new_var, 0);
6182 gcc_assert (DECL_P (new_vard));
6183 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6184 nx = TYPE_SIZE_UNIT (type);
6185 if (TREE_CONSTANT (nx))
6187 nx = create_tmp_var_raw (type, get_name (var));
6188 gimple_add_tmp_var (nx);
6189 TREE_ADDRESSABLE (nx) = 1;
6190 nx = build_fold_addr_expr_loc (clause_loc, nx);
6191 nx = fold_convert_loc (clause_loc,
6192 TREE_TYPE (new_vard), nx);
6193 gimplify_assign (new_vard, nx, ilist);
6197 x = lang_hooks.decls.omp_clause_copy_ctor
6198 (c, unshare_expr (new_var), x);
6199 gimplify_and_add (x, ilist);
6200 goto do_dtor;
6202 case OMP_CLAUSE__LOOPTEMP_:
6203 case OMP_CLAUSE__REDUCTEMP_:
6204 gcc_assert (is_taskreg_ctx (ctx));
6205 x = build_outer_var_ref (var, ctx);
6206 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6207 gimplify_and_add (x, ilist);
6208 break;
6210 case OMP_CLAUSE_COPYIN:
6211 by_ref = use_pointer_for_field (var, NULL);
6212 x = build_receiver_ref (var, by_ref, ctx);
6213 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6214 append_to_statement_list (x, &copyin_seq);
6215 copyin_by_ref |= by_ref;
6216 break;
6218 case OMP_CLAUSE_REDUCTION:
6219 case OMP_CLAUSE_IN_REDUCTION:
6220 /* OpenACC reductions are initialized using the
6221 GOACC_REDUCTION internal function. */
6222 if (is_gimple_omp_oacc (ctx->stmt))
6223 break;
6224 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6226 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6227 gimple *tseq;
6228 tree ptype = TREE_TYPE (placeholder);
6229 if (cond)
6231 x = error_mark_node;
6232 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6233 && !task_reduction_needs_orig_p)
6234 x = var;
6235 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6237 tree pptype = build_pointer_type (ptype);
6238 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6239 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6240 size_int (task_reduction_cnt_full
6241 + task_reduction_cntorig - 1),
6242 NULL_TREE, NULL_TREE);
6243 else
6245 unsigned int idx
6246 = *ctx->task_reduction_map->get (c);
6247 x = task_reduction_read (ilist, tskred_temp,
6248 pptype, 7 + 3 * idx);
6250 x = fold_convert (pptype, x);
6251 x = build_simple_mem_ref (x);
6254 else
6256 lower_private_allocate (var, new_var, allocator,
6257 allocate_ptr, ilist, ctx, false,
6258 NULL_TREE);
6259 x = build_outer_var_ref (var, ctx);
6261 if (omp_privatize_by_reference (var)
6262 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6263 x = build_fold_addr_expr_loc (clause_loc, x);
6265 SET_DECL_VALUE_EXPR (placeholder, x);
6266 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6267 tree new_vard = new_var;
6268 if (omp_privatize_by_reference (var))
6270 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6271 new_vard = TREE_OPERAND (new_var, 0);
6272 gcc_assert (DECL_P (new_vard));
6274 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6275 if (is_simd
6276 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6277 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6278 rvarp = &rvar;
6279 if (is_simd
6280 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6281 ivar, lvar, rvarp,
6282 &rvar2))
6284 if (new_vard == new_var)
6286 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6287 SET_DECL_VALUE_EXPR (new_var, ivar);
6289 else
6291 SET_DECL_VALUE_EXPR (new_vard,
6292 build_fold_addr_expr (ivar));
6293 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6295 x = lang_hooks.decls.omp_clause_default_ctor
6296 (c, unshare_expr (ivar),
6297 build_outer_var_ref (var, ctx));
6298 if (rvarp && ctx->for_simd_scan_phase)
6300 if (x)
6301 gimplify_and_add (x, &llist[0]);
6302 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6303 if (x)
6304 gimplify_and_add (x, &llist[1]);
6305 break;
6307 else if (rvarp)
6309 if (x)
6311 gimplify_and_add (x, &llist[0]);
6313 tree ivar2 = unshare_expr (lvar);
6314 TREE_OPERAND (ivar2, 1) = sctx.idx;
6315 x = lang_hooks.decls.omp_clause_default_ctor
6316 (c, ivar2, build_outer_var_ref (var, ctx));
6317 gimplify_and_add (x, &llist[0]);
6319 if (rvar2)
6321 x = lang_hooks.decls.omp_clause_default_ctor
6322 (c, unshare_expr (rvar2),
6323 build_outer_var_ref (var, ctx));
6324 gimplify_and_add (x, &llist[0]);
6327 /* For types that need construction, add another
6328 private var which will be default constructed
6329 and optionally initialized with
6330 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6331 loop we want to assign this value instead of
6332 constructing and destructing it in each
6333 iteration. */
6334 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6335 gimple_add_tmp_var (nv);
6336 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6337 ? rvar2
6338 : ivar, 0),
6339 nv);
6340 x = lang_hooks.decls.omp_clause_default_ctor
6341 (c, nv, build_outer_var_ref (var, ctx));
6342 gimplify_and_add (x, ilist);
6344 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6346 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6347 x = DECL_VALUE_EXPR (new_vard);
6348 tree vexpr = nv;
6349 if (new_vard != new_var)
6350 vexpr = build_fold_addr_expr (nv);
6351 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6352 lower_omp (&tseq, ctx);
6353 SET_DECL_VALUE_EXPR (new_vard, x);
6354 gimple_seq_add_seq (ilist, tseq);
6355 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6358 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6359 if (x)
6360 gimplify_and_add (x, dlist);
6363 tree ref = build_outer_var_ref (var, ctx);
6364 x = unshare_expr (ivar);
6365 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6366 ref);
6367 gimplify_and_add (x, &llist[0]);
6369 ref = build_outer_var_ref (var, ctx);
6370 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6371 rvar);
6372 gimplify_and_add (x, &llist[3]);
6374 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6375 if (new_vard == new_var)
6376 SET_DECL_VALUE_EXPR (new_var, lvar);
6377 else
6378 SET_DECL_VALUE_EXPR (new_vard,
6379 build_fold_addr_expr (lvar));
6381 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6382 if (x)
6383 gimplify_and_add (x, &llist[1]);
6385 tree ivar2 = unshare_expr (lvar);
6386 TREE_OPERAND (ivar2, 1) = sctx.idx;
6387 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6388 if (x)
6389 gimplify_and_add (x, &llist[1]);
6391 if (rvar2)
6393 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6394 if (x)
6395 gimplify_and_add (x, &llist[1]);
6397 break;
6399 if (x)
6400 gimplify_and_add (x, &llist[0]);
6401 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6403 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6404 lower_omp (&tseq, ctx);
6405 gimple_seq_add_seq (&llist[0], tseq);
6407 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6408 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6409 lower_omp (&tseq, ctx);
6410 gimple_seq_add_seq (&llist[1], tseq);
6411 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6412 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6413 if (new_vard == new_var)
6414 SET_DECL_VALUE_EXPR (new_var, lvar);
6415 else
6416 SET_DECL_VALUE_EXPR (new_vard,
6417 build_fold_addr_expr (lvar));
6418 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6419 if (x)
6420 gimplify_and_add (x, &llist[1]);
6421 break;
6423 /* If this is a reference to constant size reduction var
6424 with placeholder, we haven't emitted the initializer
6425 for it because it is undesirable if SIMD arrays are used.
6426 But if they aren't used, we need to emit the deferred
6427 initialization now. */
6428 else if (omp_privatize_by_reference (var) && is_simd)
6429 handle_simd_reference (clause_loc, new_vard, ilist);
6431 tree lab2 = NULL_TREE;
6432 if (cond)
6434 gimple *g;
6435 if (!is_parallel_ctx (ctx))
6437 tree condv = create_tmp_var (boolean_type_node);
6438 tree m = build_simple_mem_ref (cond);
6439 g = gimple_build_assign (condv, m);
6440 gimple_seq_add_stmt (ilist, g);
6441 tree lab1
6442 = create_artificial_label (UNKNOWN_LOCATION);
6443 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6444 g = gimple_build_cond (NE_EXPR, condv,
6445 boolean_false_node,
6446 lab2, lab1);
6447 gimple_seq_add_stmt (ilist, g);
6448 gimple_seq_add_stmt (ilist,
6449 gimple_build_label (lab1));
6451 g = gimple_build_assign (build_simple_mem_ref (cond),
6452 boolean_true_node);
6453 gimple_seq_add_stmt (ilist, g);
6455 x = lang_hooks.decls.omp_clause_default_ctor
6456 (c, unshare_expr (new_var),
6457 cond ? NULL_TREE
6458 : build_outer_var_ref (var, ctx));
6459 if (x)
6460 gimplify_and_add (x, ilist);
6462 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6463 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6465 if (ctx->for_simd_scan_phase)
6466 goto do_dtor;
6467 if (x || (!is_simd
6468 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6470 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6471 gimple_add_tmp_var (nv);
6472 ctx->cb.decl_map->put (new_vard, nv);
6473 x = lang_hooks.decls.omp_clause_default_ctor
6474 (c, nv, build_outer_var_ref (var, ctx));
6475 if (x)
6476 gimplify_and_add (x, ilist);
6477 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6479 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6480 tree vexpr = nv;
6481 if (new_vard != new_var)
6482 vexpr = build_fold_addr_expr (nv);
6483 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6484 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6485 lower_omp (&tseq, ctx);
6486 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6487 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6488 gimple_seq_add_seq (ilist, tseq);
6490 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6491 if (is_simd && ctx->scan_exclusive)
6493 tree nv2
6494 = create_tmp_var_raw (TREE_TYPE (new_var));
6495 gimple_add_tmp_var (nv2);
6496 ctx->cb.decl_map->put (nv, nv2);
6497 x = lang_hooks.decls.omp_clause_default_ctor
6498 (c, nv2, build_outer_var_ref (var, ctx));
6499 gimplify_and_add (x, ilist);
6500 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6501 if (x)
6502 gimplify_and_add (x, dlist);
6504 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6505 if (x)
6506 gimplify_and_add (x, dlist);
6508 else if (is_simd
6509 && ctx->scan_exclusive
6510 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6512 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6513 gimple_add_tmp_var (nv2);
6514 ctx->cb.decl_map->put (new_vard, nv2);
6515 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6516 if (x)
6517 gimplify_and_add (x, dlist);
6519 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6520 goto do_dtor;
6523 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6525 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6526 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6527 && is_omp_target (ctx->stmt))
6529 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6530 tree oldv = NULL_TREE;
6531 gcc_assert (d);
6532 if (DECL_HAS_VALUE_EXPR_P (d))
6533 oldv = DECL_VALUE_EXPR (d);
6534 SET_DECL_VALUE_EXPR (d, new_vard);
6535 DECL_HAS_VALUE_EXPR_P (d) = 1;
6536 lower_omp (&tseq, ctx);
6537 if (oldv)
6538 SET_DECL_VALUE_EXPR (d, oldv);
6539 else
6541 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6542 DECL_HAS_VALUE_EXPR_P (d) = 0;
6545 else
6546 lower_omp (&tseq, ctx);
6547 gimple_seq_add_seq (ilist, tseq);
6549 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6550 if (is_simd)
6552 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6553 lower_omp (&tseq, ctx);
6554 gimple_seq_add_seq (dlist, tseq);
6555 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6557 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6558 if (cond)
6560 if (lab2)
6561 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6562 break;
6564 goto do_dtor;
6566 else
6568 x = omp_reduction_init (c, TREE_TYPE (new_var));
6569 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6570 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6572 if (cond)
6574 gimple *g;
6575 tree lab2 = NULL_TREE;
6576 /* GOMP_taskgroup_reduction_register memsets the whole
6577 array to zero. If the initializer is zero, we don't
6578 need to initialize it again, just mark it as ever
6579 used unconditionally, i.e. cond = true. */
6580 if (initializer_zerop (x))
6582 g = gimple_build_assign (build_simple_mem_ref (cond),
6583 boolean_true_node);
6584 gimple_seq_add_stmt (ilist, g);
6585 break;
6588 /* Otherwise, emit
6589 if (!cond) { cond = true; new_var = x; } */
6590 if (!is_parallel_ctx (ctx))
6592 tree condv = create_tmp_var (boolean_type_node);
6593 tree m = build_simple_mem_ref (cond);
6594 g = gimple_build_assign (condv, m);
6595 gimple_seq_add_stmt (ilist, g);
6596 tree lab1
6597 = create_artificial_label (UNKNOWN_LOCATION);
6598 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6599 g = gimple_build_cond (NE_EXPR, condv,
6600 boolean_false_node,
6601 lab2, lab1);
6602 gimple_seq_add_stmt (ilist, g);
6603 gimple_seq_add_stmt (ilist,
6604 gimple_build_label (lab1));
6606 g = gimple_build_assign (build_simple_mem_ref (cond),
6607 boolean_true_node);
6608 gimple_seq_add_stmt (ilist, g);
6609 gimplify_assign (new_var, x, ilist);
6610 if (lab2)
6611 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6612 break;
6615 /* reduction(-:var) sums up the partial results, so it
6616 acts identically to reduction(+:var). */
6617 if (code == MINUS_EXPR)
6618 code = PLUS_EXPR;
6620 bool is_truth_op
6621 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6622 tree new_vard = new_var;
6623 if (is_simd && omp_privatize_by_reference (var))
6625 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6626 new_vard = TREE_OPERAND (new_var, 0);
6627 gcc_assert (DECL_P (new_vard));
6629 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6630 if (is_simd
6631 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6632 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6633 rvarp = &rvar;
6634 if (is_simd
6635 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6636 ivar, lvar, rvarp,
6637 &rvar2))
6639 if (new_vard != new_var)
6641 SET_DECL_VALUE_EXPR (new_vard,
6642 build_fold_addr_expr (lvar));
6643 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6646 tree ref = build_outer_var_ref (var, ctx);
6648 if (rvarp)
6650 if (ctx->for_simd_scan_phase)
6651 break;
6652 gimplify_assign (ivar, ref, &llist[0]);
6653 ref = build_outer_var_ref (var, ctx);
6654 gimplify_assign (ref, rvar, &llist[3]);
6655 break;
6658 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6660 if (sctx.is_simt)
6662 if (!simt_lane)
6663 simt_lane = create_tmp_var (unsigned_type_node);
6664 x = build_call_expr_internal_loc
6665 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6666 TREE_TYPE (ivar), 2, ivar, simt_lane);
6667 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6668 gimplify_assign (ivar, x, &llist[2]);
6670 tree ivar2 = ivar;
6671 tree ref2 = ref;
6672 if (is_truth_op)
6674 tree zero = build_zero_cst (TREE_TYPE (ivar));
6675 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6676 boolean_type_node, ivar,
6677 zero);
6678 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6679 boolean_type_node, ref,
6680 zero);
6682 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6683 if (is_truth_op)
6684 x = fold_convert (TREE_TYPE (ref), x);
6685 ref = build_outer_var_ref (var, ctx);
6686 gimplify_assign (ref, x, &llist[1]);
6689 else
6691 lower_private_allocate (var, new_var, allocator,
6692 allocate_ptr, ilist, ctx,
6693 false, NULL_TREE);
6694 if (omp_privatize_by_reference (var) && is_simd)
6695 handle_simd_reference (clause_loc, new_vard, ilist);
6696 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6697 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6698 break;
6699 gimplify_assign (new_var, x, ilist);
6700 if (is_simd)
6702 tree ref = build_outer_var_ref (var, ctx);
6703 tree new_var2 = new_var;
6704 tree ref2 = ref;
6705 if (is_truth_op)
6707 tree zero = build_zero_cst (TREE_TYPE (new_var));
6708 new_var2
6709 = fold_build2_loc (clause_loc, NE_EXPR,
6710 boolean_type_node, new_var,
6711 zero);
6712 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6713 boolean_type_node, ref,
6714 zero);
6716 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6717 if (is_truth_op)
6718 x = fold_convert (TREE_TYPE (new_var), x);
6719 ref = build_outer_var_ref (var, ctx);
6720 gimplify_assign (ref, x, dlist);
6722 if (allocator)
6723 goto do_dtor;
6726 break;
6728 default:
6729 gcc_unreachable ();
6733 if (tskred_avar)
6735 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6736 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6739 if (known_eq (sctx.max_vf, 1U))
6741 sctx.is_simt = false;
6742 if (ctx->lastprivate_conditional_map)
6744 if (gimple_omp_for_combined_into_p (ctx->stmt))
6746 /* Signal to lower_omp_1 that it should use parent context. */
6747 ctx->combined_into_simd_safelen1 = true;
6748 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6749 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6750 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6752 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6753 omp_context *outer = ctx->outer;
6754 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6755 outer = outer->outer;
6756 tree *v = ctx->lastprivate_conditional_map->get (o);
6757 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6758 tree *pv = outer->lastprivate_conditional_map->get (po);
6759 *v = *pv;
6762 else
6764 /* When not vectorized, treat lastprivate(conditional:) like
6765 normal lastprivate, as there will be just one simd lane
6766 writing the privatized variable. */
6767 delete ctx->lastprivate_conditional_map;
6768 ctx->lastprivate_conditional_map = NULL;
6773 if (nonconst_simd_if)
6775 if (sctx.lane == NULL_TREE)
6777 sctx.idx = create_tmp_var (unsigned_type_node);
6778 sctx.lane = create_tmp_var (unsigned_type_node);
6780 /* FIXME: For now. */
6781 sctx.is_simt = false;
6784 if (sctx.lane || sctx.is_simt)
6786 uid = create_tmp_var (ptr_type_node, "simduid");
6787 /* Don't want uninit warnings on simduid, it is always uninitialized,
6788 but we use it not for the value, but for the DECL_UID only. */
6789 suppress_warning (uid, OPT_Wuninitialized);
6790 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6791 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6792 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6793 gimple_omp_for_set_clauses (ctx->stmt, c);
6795 /* Emit calls denoting privatized variables and initializing a pointer to
6796 structure that holds private variables as fields after ompdevlow pass. */
6797 if (sctx.is_simt)
6799 sctx.simt_eargs[0] = uid;
6800 gimple *g
6801 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6802 gimple_call_set_lhs (g, uid);
6803 gimple_seq_add_stmt (ilist, g);
6804 sctx.simt_eargs.release ();
6806 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6807 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6808 gimple_call_set_lhs (g, simtrec);
6809 gimple_seq_add_stmt (ilist, g);
6811 if (sctx.lane)
6813 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6814 2 + (nonconst_simd_if != NULL),
6815 uid, integer_zero_node,
6816 nonconst_simd_if);
6817 gimple_call_set_lhs (g, sctx.lane);
6818 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6819 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6820 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6821 build_int_cst (unsigned_type_node, 0));
6822 gimple_seq_add_stmt (ilist, g);
6823 if (sctx.lastlane)
6825 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6826 2, uid, sctx.lane);
6827 gimple_call_set_lhs (g, sctx.lastlane);
6828 gimple_seq_add_stmt (dlist, g);
6829 gimple_seq_add_seq (dlist, llist[3]);
6831 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6832 if (llist[2])
6834 tree simt_vf = create_tmp_var (unsigned_type_node);
6835 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6836 gimple_call_set_lhs (g, simt_vf);
6837 gimple_seq_add_stmt (dlist, g);
6839 tree t = build_int_cst (unsigned_type_node, 1);
6840 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6841 gimple_seq_add_stmt (dlist, g);
6843 t = build_int_cst (unsigned_type_node, 0);
6844 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6845 gimple_seq_add_stmt (dlist, g);
6847 tree body = create_artificial_label (UNKNOWN_LOCATION);
6848 tree header = create_artificial_label (UNKNOWN_LOCATION);
6849 tree end = create_artificial_label (UNKNOWN_LOCATION);
6850 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6851 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6853 gimple_seq_add_seq (dlist, llist[2]);
6855 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6856 gimple_seq_add_stmt (dlist, g);
6858 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6859 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6860 gimple_seq_add_stmt (dlist, g);
6862 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6864 for (int i = 0; i < 2; i++)
6865 if (llist[i])
6867 tree vf = create_tmp_var (unsigned_type_node);
6868 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6869 gimple_call_set_lhs (g, vf);
6870 gimple_seq *seq = i == 0 ? ilist : dlist;
6871 gimple_seq_add_stmt (seq, g);
6872 tree t = build_int_cst (unsigned_type_node, 0);
6873 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6874 gimple_seq_add_stmt (seq, g);
6875 tree body = create_artificial_label (UNKNOWN_LOCATION);
6876 tree header = create_artificial_label (UNKNOWN_LOCATION);
6877 tree end = create_artificial_label (UNKNOWN_LOCATION);
6878 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6879 gimple_seq_add_stmt (seq, gimple_build_label (body));
6880 gimple_seq_add_seq (seq, llist[i]);
6881 t = build_int_cst (unsigned_type_node, 1);
6882 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6883 gimple_seq_add_stmt (seq, g);
6884 gimple_seq_add_stmt (seq, gimple_build_label (header));
6885 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6886 gimple_seq_add_stmt (seq, g);
6887 gimple_seq_add_stmt (seq, gimple_build_label (end));
6890 if (sctx.is_simt)
6892 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6893 gimple *g
6894 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6895 gimple_seq_add_stmt (dlist, g);
6898 /* The copyin sequence is not to be executed by the main thread, since
6899 that would result in self-copies. Perhaps not visible to scalars,
6900 but it certainly is to C++ operator=. */
6901 if (copyin_seq)
6903 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6905 x = build2 (NE_EXPR, boolean_type_node, x,
6906 build_int_cst (TREE_TYPE (x), 0));
6907 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6908 gimplify_and_add (x, ilist);
6911 /* If any copyin variable is passed by reference, we must ensure the
6912 master thread doesn't modify it before it is copied over in all
6913 threads. Similarly for variables in both firstprivate and
6914 lastprivate clauses we need to ensure the lastprivate copying
6915 happens after firstprivate copying in all threads. And similarly
6916 for UDRs if initializer expression refers to omp_orig. */
6917 if (copyin_by_ref || lastprivate_firstprivate
6918 || (reduction_omp_orig_ref
6919 && !ctx->scan_inclusive
6920 && !ctx->scan_exclusive))
6922 /* Don't add any barrier for #pragma omp simd or
6923 #pragma omp distribute. */
6924 if (!is_task_ctx (ctx)
6925 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6926 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6927 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6930 /* If max_vf is non-zero, then we can use only a vectorization factor
6931 up to the max_vf we chose. So stick it into the safelen clause. */
6932 if (maybe_ne (sctx.max_vf, 0U))
6934 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6935 OMP_CLAUSE_SAFELEN);
6936 poly_uint64 safe_len;
6937 if (c == NULL_TREE
6938 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6939 && maybe_gt (safe_len, sctx.max_vf)))
6941 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6942 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6943 sctx.max_vf);
6944 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6945 gimple_omp_for_set_clauses (ctx->stmt, c);
6950 /* Create temporary variables for lastprivate(conditional:) implementation
6951 in context CTX with CLAUSES. */
6953 static void
6954 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6956 tree iter_type = NULL_TREE;
6957 tree cond_ptr = NULL_TREE;
6958 tree iter_var = NULL_TREE;
6959 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6960 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6961 tree next = *clauses;
6962 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6963 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6964 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6966 if (is_simd)
6968 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6969 gcc_assert (cc);
6970 if (iter_type == NULL_TREE)
6972 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6973 iter_var = create_tmp_var_raw (iter_type);
6974 DECL_CONTEXT (iter_var) = current_function_decl;
6975 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6976 DECL_CHAIN (iter_var) = ctx->block_vars;
6977 ctx->block_vars = iter_var;
6978 tree c3
6979 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6980 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6981 OMP_CLAUSE_DECL (c3) = iter_var;
6982 OMP_CLAUSE_CHAIN (c3) = *clauses;
6983 *clauses = c3;
6984 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6986 next = OMP_CLAUSE_CHAIN (cc);
6987 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6988 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6989 ctx->lastprivate_conditional_map->put (o, v);
6990 continue;
6992 if (iter_type == NULL)
6994 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6996 struct omp_for_data fd;
6997 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6998 NULL);
6999 iter_type = unsigned_type_for (fd.iter_type);
7001 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
7002 iter_type = unsigned_type_node;
7003 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
7004 if (c2)
7006 cond_ptr
7007 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
7008 OMP_CLAUSE_DECL (c2) = cond_ptr;
7010 else
7012 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
7013 DECL_CONTEXT (cond_ptr) = current_function_decl;
7014 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
7015 DECL_CHAIN (cond_ptr) = ctx->block_vars;
7016 ctx->block_vars = cond_ptr;
7017 c2 = build_omp_clause (UNKNOWN_LOCATION,
7018 OMP_CLAUSE__CONDTEMP_);
7019 OMP_CLAUSE_DECL (c2) = cond_ptr;
7020 OMP_CLAUSE_CHAIN (c2) = *clauses;
7021 *clauses = c2;
7023 iter_var = create_tmp_var_raw (iter_type);
7024 DECL_CONTEXT (iter_var) = current_function_decl;
7025 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7026 DECL_CHAIN (iter_var) = ctx->block_vars;
7027 ctx->block_vars = iter_var;
7028 tree c3
7029 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7030 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7031 OMP_CLAUSE_DECL (c3) = iter_var;
7032 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
7033 OMP_CLAUSE_CHAIN (c2) = c3;
7034 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7036 tree v = create_tmp_var_raw (iter_type);
7037 DECL_CONTEXT (v) = current_function_decl;
7038 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
7039 DECL_CHAIN (v) = ctx->block_vars;
7040 ctx->block_vars = v;
7041 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7042 ctx->lastprivate_conditional_map->put (o, v);
7047 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7048 both parallel and workshare constructs. PREDICATE may be NULL if it's
7049 always true. BODY_P is the sequence to insert early initialization
7050 if needed, STMT_LIST is where the non-conditional lastprivate handling
7051 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
7052 section. */
7054 static void
7055 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
7056 gimple_seq *stmt_list, gimple_seq *cstmt_list,
7057 omp_context *ctx)
7059 tree x, c, label = NULL, orig_clauses = clauses;
7060 bool par_clauses = false;
7061 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
7062 unsigned HOST_WIDE_INT conditional_off = 0;
7063 gimple_seq post_stmt_list = NULL;
7065 /* Early exit if there are no lastprivate or linear clauses. */
7066 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
7067 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
7068 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
7069 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
7070 break;
7071 if (clauses == NULL)
7073 /* If this was a workshare clause, see if it had been combined
7074 with its parallel. In that case, look for the clauses on the
7075 parallel statement itself. */
7076 if (is_parallel_ctx (ctx))
7077 return;
7079 ctx = ctx->outer;
7080 if (ctx == NULL || !is_parallel_ctx (ctx))
7081 return;
7083 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7084 OMP_CLAUSE_LASTPRIVATE);
7085 if (clauses == NULL)
7086 return;
7087 par_clauses = true;
7090 bool maybe_simt = false;
7091 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7092 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7094 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
7095 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
7096 if (simduid)
7097 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
7100 if (predicate)
7102 gcond *stmt;
7103 tree label_true, arm1, arm2;
7104 enum tree_code pred_code = TREE_CODE (predicate);
7106 label = create_artificial_label (UNKNOWN_LOCATION);
7107 label_true = create_artificial_label (UNKNOWN_LOCATION);
7108 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
7110 arm1 = TREE_OPERAND (predicate, 0);
7111 arm2 = TREE_OPERAND (predicate, 1);
7112 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7113 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
7115 else
7117 arm1 = predicate;
7118 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7119 arm2 = boolean_false_node;
7120 pred_code = NE_EXPR;
7122 if (maybe_simt)
7124 c = build2 (pred_code, boolean_type_node, arm1, arm2);
7125 c = fold_convert (integer_type_node, c);
7126 simtcond = create_tmp_var (integer_type_node);
7127 gimplify_assign (simtcond, c, stmt_list);
7128 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
7129 1, simtcond);
7130 c = create_tmp_var (integer_type_node);
7131 gimple_call_set_lhs (g, c);
7132 gimple_seq_add_stmt (stmt_list, g);
7133 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
7134 label_true, label);
7136 else
7137 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
7138 gimple_seq_add_stmt (stmt_list, stmt);
7139 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
7142 tree cond_ptr = NULL_TREE;
7143 for (c = clauses; c ;)
7145 tree var, new_var;
7146 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7147 gimple_seq *this_stmt_list = stmt_list;
7148 tree lab2 = NULL_TREE;
7150 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7151 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7152 && ctx->lastprivate_conditional_map
7153 && !ctx->combined_into_simd_safelen1)
7155 gcc_assert (body_p);
7156 if (simduid)
7157 goto next;
7158 if (cond_ptr == NULL_TREE)
7160 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
7161 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
7163 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
7164 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7165 tree v = *ctx->lastprivate_conditional_map->get (o);
7166 gimplify_assign (v, build_zero_cst (type), body_p);
7167 this_stmt_list = cstmt_list;
7168 tree mem;
7169 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
7171 mem = build2 (MEM_REF, type, cond_ptr,
7172 build_int_cst (TREE_TYPE (cond_ptr),
7173 conditional_off));
7174 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
7176 else
7177 mem = build4 (ARRAY_REF, type, cond_ptr,
7178 size_int (conditional_off++), NULL_TREE, NULL_TREE);
7179 tree mem2 = copy_node (mem);
7180 gimple_seq seq = NULL;
7181 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
7182 gimple_seq_add_seq (this_stmt_list, seq);
7183 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7184 lab2 = create_artificial_label (UNKNOWN_LOCATION);
7185 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
7186 gimple_seq_add_stmt (this_stmt_list, g);
7187 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
7188 gimplify_assign (mem2, v, this_stmt_list);
7190 else if (predicate
7191 && ctx->combined_into_simd_safelen1
7192 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7193 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7194 && ctx->lastprivate_conditional_map)
7195 this_stmt_list = &post_stmt_list;
7197 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7198 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7199 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7201 var = OMP_CLAUSE_DECL (c);
7202 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7203 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7204 && is_taskloop_ctx (ctx))
7206 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
7207 new_var = lookup_decl (var, ctx->outer);
7209 else
7211 new_var = lookup_decl (var, ctx);
7212 /* Avoid uninitialized warnings for lastprivate and
7213 for linear iterators. */
7214 if (predicate
7215 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7216 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
7217 suppress_warning (new_var, OPT_Wuninitialized);
7220 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
7222 tree val = DECL_VALUE_EXPR (new_var);
7223 if (TREE_CODE (val) == ARRAY_REF
7224 && VAR_P (TREE_OPERAND (val, 0))
7225 && lookup_attribute ("omp simd array",
7226 DECL_ATTRIBUTES (TREE_OPERAND (val,
7227 0))))
7229 if (lastlane == NULL)
7231 lastlane = create_tmp_var (unsigned_type_node);
7232 gcall *g
7233 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
7234 2, simduid,
7235 TREE_OPERAND (val, 1));
7236 gimple_call_set_lhs (g, lastlane);
7237 gimple_seq_add_stmt (this_stmt_list, g);
7239 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
7240 TREE_OPERAND (val, 0), lastlane,
7241 NULL_TREE, NULL_TREE);
7242 TREE_THIS_NOTRAP (new_var) = 1;
7245 else if (maybe_simt)
7247 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7248 ? DECL_VALUE_EXPR (new_var)
7249 : new_var);
7250 if (simtlast == NULL)
7252 simtlast = create_tmp_var (unsigned_type_node);
7253 gcall *g = gimple_build_call_internal
7254 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7255 gimple_call_set_lhs (g, simtlast);
7256 gimple_seq_add_stmt (this_stmt_list, g);
7258 x = build_call_expr_internal_loc
7259 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7260 TREE_TYPE (val), 2, val, simtlast);
7261 new_var = unshare_expr (new_var);
7262 gimplify_assign (new_var, x, this_stmt_list);
7263 new_var = unshare_expr (new_var);
7266 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7267 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7269 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7270 gimple_seq_add_seq (this_stmt_list,
7271 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7272 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7274 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7275 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7277 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7278 gimple_seq_add_seq (this_stmt_list,
7279 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7280 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
7283 x = NULL_TREE;
7284 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7285 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7286 && is_taskloop_ctx (ctx))
7288 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7289 ctx->outer->outer);
7290 if (is_global_var (ovar))
7291 x = ovar;
7293 if (!x)
7294 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7295 if (omp_privatize_by_reference (var))
7296 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7297 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7298 gimplify_and_add (x, this_stmt_list);
7300 if (lab2)
7301 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7304 next:
7305 c = OMP_CLAUSE_CHAIN (c);
7306 if (c == NULL && !par_clauses)
7308 /* If this was a workshare clause, see if it had been combined
7309 with its parallel. In that case, continue looking for the
7310 clauses also on the parallel statement itself. */
7311 if (is_parallel_ctx (ctx))
7312 break;
7314 ctx = ctx->outer;
7315 if (ctx == NULL || !is_parallel_ctx (ctx))
7316 break;
7318 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7319 OMP_CLAUSE_LASTPRIVATE);
7320 par_clauses = true;
7324 if (label)
7325 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7326 gimple_seq_add_seq (stmt_list, post_stmt_list);
7329 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7330 (which might be a placeholder). INNER is true if this is an inner
7331 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7332 join markers. Generate the before-loop forking sequence in
7333 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7334 general form of these sequences is
7336 GOACC_REDUCTION_SETUP
7337 GOACC_FORK
7338 GOACC_REDUCTION_INIT
7340 GOACC_REDUCTION_FINI
7341 GOACC_JOIN
7342 GOACC_REDUCTION_TEARDOWN. */
7344 static void
7345 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7346 gcall *fork, gcall *private_marker, gcall *join,
7347 gimple_seq *fork_seq, gimple_seq *join_seq,
7348 omp_context *ctx)
7350 gimple_seq before_fork = NULL;
7351 gimple_seq after_fork = NULL;
7352 gimple_seq before_join = NULL;
7353 gimple_seq after_join = NULL;
7354 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7355 setup_code = NULL_TREE, teardown_code = NULL_TREE;
7356 unsigned offset = 0;
7358 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7359 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7361 /* No 'reduction' clauses on OpenACC 'kernels'. */
7362 gcc_checking_assert (!is_oacc_kernels (ctx));
7363 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7364 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7366 tree orig = OMP_CLAUSE_DECL (c);
7367 tree var = maybe_lookup_decl (orig, ctx);
7368 tree ref_to_res = NULL_TREE;
7369 tree incoming, outgoing, v1, v2, v3;
7370 bool is_private = false;
7372 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7373 if (rcode == MINUS_EXPR)
7374 rcode = PLUS_EXPR;
7375 else if (rcode == TRUTH_ANDIF_EXPR)
7376 rcode = BIT_AND_EXPR;
7377 else if (rcode == TRUTH_ORIF_EXPR)
7378 rcode = BIT_IOR_EXPR;
7379 tree op = build_int_cst (unsigned_type_node, rcode);
7381 if (!var)
7382 var = orig;
7384 incoming = outgoing = var;
7386 if (!inner)
7388 /* See if an outer construct also reduces this variable. */
7389 omp_context *outer = ctx;
7391 while (omp_context *probe = outer->outer)
7393 enum gimple_code type = gimple_code (probe->stmt);
7394 tree cls;
7396 switch (type)
7398 case GIMPLE_OMP_FOR:
7399 cls = gimple_omp_for_clauses (probe->stmt);
7400 break;
7402 case GIMPLE_OMP_TARGET:
7403 /* No 'reduction' clauses inside OpenACC 'kernels'
7404 regions. */
7405 gcc_checking_assert (!is_oacc_kernels (probe));
7407 if (!is_gimple_omp_offloaded (probe->stmt))
7408 goto do_lookup;
7410 cls = gimple_omp_target_clauses (probe->stmt);
7411 break;
7413 default:
7414 goto do_lookup;
7417 outer = probe;
7418 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7419 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7420 && orig == OMP_CLAUSE_DECL (cls))
7422 incoming = outgoing = lookup_decl (orig, probe);
7423 goto has_outer_reduction;
7425 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7426 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7427 && orig == OMP_CLAUSE_DECL (cls))
7429 is_private = true;
7430 goto do_lookup;
7434 do_lookup:
7435 /* This is the outermost construct with this reduction,
7436 see if there's a mapping for it. */
7437 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7438 && maybe_lookup_field (orig, outer) && !is_private)
7440 ref_to_res = build_receiver_ref (orig, false, outer);
7441 if (omp_privatize_by_reference (orig))
7442 ref_to_res = build_simple_mem_ref (ref_to_res);
7444 tree type = TREE_TYPE (var);
7445 if (POINTER_TYPE_P (type))
7446 type = TREE_TYPE (type);
7448 outgoing = var;
7449 incoming = omp_reduction_init_op (loc, rcode, type);
7451 else
7453 /* Try to look at enclosing contexts for reduction var,
7454 use original if no mapping found. */
7455 tree t = NULL_TREE;
7456 omp_context *c = ctx->outer;
7457 while (c && !t)
7459 t = maybe_lookup_decl (orig, c);
7460 c = c->outer;
7462 incoming = outgoing = (t ? t : orig);
7465 has_outer_reduction:;
7468 if (!ref_to_res)
7469 ref_to_res = integer_zero_node;
7471 if (omp_privatize_by_reference (orig))
7473 tree type = TREE_TYPE (var);
7474 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7476 if (!inner)
7478 tree x = create_tmp_var (TREE_TYPE (type), id);
7479 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7482 v1 = create_tmp_var (type, id);
7483 v2 = create_tmp_var (type, id);
7484 v3 = create_tmp_var (type, id);
7486 gimplify_assign (v1, var, fork_seq);
7487 gimplify_assign (v2, var, fork_seq);
7488 gimplify_assign (v3, var, fork_seq);
7490 var = build_simple_mem_ref (var);
7491 v1 = build_simple_mem_ref (v1);
7492 v2 = build_simple_mem_ref (v2);
7493 v3 = build_simple_mem_ref (v3);
7494 outgoing = build_simple_mem_ref (outgoing);
7496 if (!TREE_CONSTANT (incoming))
7497 incoming = build_simple_mem_ref (incoming);
7499 else
7500 v1 = v2 = v3 = var;
7502 /* Determine position in reduction buffer, which may be used
7503 by target. The parser has ensured that this is not a
7504 variable-sized type. */
7505 fixed_size_mode mode
7506 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7507 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7508 offset = (offset + align - 1) & ~(align - 1);
7509 tree off = build_int_cst (sizetype, offset);
7510 offset += GET_MODE_SIZE (mode);
7512 if (!init_code)
7514 init_code = build_int_cst (integer_type_node,
7515 IFN_GOACC_REDUCTION_INIT);
7516 fini_code = build_int_cst (integer_type_node,
7517 IFN_GOACC_REDUCTION_FINI);
7518 setup_code = build_int_cst (integer_type_node,
7519 IFN_GOACC_REDUCTION_SETUP);
7520 teardown_code = build_int_cst (integer_type_node,
7521 IFN_GOACC_REDUCTION_TEARDOWN);
7524 tree setup_call
7525 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7526 TREE_TYPE (var), 6, setup_code,
7527 unshare_expr (ref_to_res),
7528 incoming, level, op, off);
7529 tree init_call
7530 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7531 TREE_TYPE (var), 6, init_code,
7532 unshare_expr (ref_to_res),
7533 v1, level, op, off);
7534 tree fini_call
7535 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7536 TREE_TYPE (var), 6, fini_code,
7537 unshare_expr (ref_to_res),
7538 v2, level, op, off);
7539 tree teardown_call
7540 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7541 TREE_TYPE (var), 6, teardown_code,
7542 ref_to_res, v3, level, op, off);
7544 gimplify_assign (v1, setup_call, &before_fork);
7545 gimplify_assign (v2, init_call, &after_fork);
7546 gimplify_assign (v3, fini_call, &before_join);
7547 gimplify_assign (outgoing, teardown_call, &after_join);
7550 /* Now stitch things together. */
7551 gimple_seq_add_seq (fork_seq, before_fork);
7552 if (private_marker)
7553 gimple_seq_add_stmt (fork_seq, private_marker);
7554 if (fork)
7555 gimple_seq_add_stmt (fork_seq, fork);
7556 gimple_seq_add_seq (fork_seq, after_fork);
7558 gimple_seq_add_seq (join_seq, before_join);
7559 if (join)
7560 gimple_seq_add_stmt (join_seq, join);
7561 gimple_seq_add_seq (join_seq, after_join);
7564 /* Generate code to implement the REDUCTION clauses, append it
7565 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7566 that should be emitted also inside of the critical section,
7567 in that case clear *CLIST afterwards, otherwise leave it as is
7568 and let the caller emit it itself. */
7570 static void
7571 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7572 gimple_seq *clist, omp_context *ctx)
7574 gimple_seq sub_seq = NULL;
7575 gimple *stmt;
7576 tree x, c;
7577 int count = 0;
7579 /* OpenACC loop reductions are handled elsewhere. */
7580 if (is_gimple_omp_oacc (ctx->stmt))
7581 return;
7583 /* SIMD reductions are handled in lower_rec_input_clauses. */
7584 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7585 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7586 return;
7588 /* inscan reductions are handled elsewhere. */
7589 if (ctx->scan_inclusive || ctx->scan_exclusive)
7590 return;
7592 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7593 update in that case, otherwise use a lock. */
7594 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7595 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7596 && !OMP_CLAUSE_REDUCTION_TASK (c))
7598 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7599 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7601 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7602 count = -1;
7603 break;
7605 count++;
7608 if (count == 0)
7609 return;
7611 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7613 tree var, ref, new_var, orig_var;
7614 enum tree_code code;
7615 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7617 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7618 || OMP_CLAUSE_REDUCTION_TASK (c))
7619 continue;
7621 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7622 orig_var = var = OMP_CLAUSE_DECL (c);
7623 if (TREE_CODE (var) == MEM_REF)
7625 var = TREE_OPERAND (var, 0);
7626 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7627 var = TREE_OPERAND (var, 0);
7628 if (TREE_CODE (var) == ADDR_EXPR)
7629 var = TREE_OPERAND (var, 0);
7630 else
7632 /* If this is a pointer or referenced based array
7633 section, the var could be private in the outer
7634 context e.g. on orphaned loop construct. Pretend this
7635 is private variable's outer reference. */
7636 ccode = OMP_CLAUSE_PRIVATE;
7637 if (TREE_CODE (var) == INDIRECT_REF)
7638 var = TREE_OPERAND (var, 0);
7640 orig_var = var;
7641 if (is_variable_sized (var))
7643 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7644 var = DECL_VALUE_EXPR (var);
7645 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7646 var = TREE_OPERAND (var, 0);
7647 gcc_assert (DECL_P (var));
7650 new_var = lookup_decl (var, ctx);
7651 if (var == OMP_CLAUSE_DECL (c)
7652 && omp_privatize_by_reference (var))
7653 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7654 ref = build_outer_var_ref (var, ctx, ccode);
7655 code = OMP_CLAUSE_REDUCTION_CODE (c);
7657 /* reduction(-:var) sums up the partial results, so it acts
7658 identically to reduction(+:var). */
7659 if (code == MINUS_EXPR)
7660 code = PLUS_EXPR;
7662 bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
7663 if (count == 1)
7665 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7667 addr = save_expr (addr);
7668 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7669 tree new_var2 = new_var;
7670 tree ref2 = ref;
7671 if (is_truth_op)
7673 tree zero = build_zero_cst (TREE_TYPE (new_var));
7674 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7675 boolean_type_node, new_var, zero);
7676 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7677 ref, zero);
7679 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7680 new_var2);
7681 if (is_truth_op)
7682 x = fold_convert (TREE_TYPE (new_var), x);
7683 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7684 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7685 gimplify_and_add (x, stmt_seqp);
7686 return;
7688 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7690 tree d = OMP_CLAUSE_DECL (c);
7691 tree type = TREE_TYPE (d);
7692 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7693 tree i = create_tmp_var (TREE_TYPE (v));
7694 tree ptype = build_pointer_type (TREE_TYPE (type));
7695 tree bias = TREE_OPERAND (d, 1);
7696 d = TREE_OPERAND (d, 0);
7697 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7699 tree b = TREE_OPERAND (d, 1);
7700 b = maybe_lookup_decl (b, ctx);
7701 if (b == NULL)
7703 b = TREE_OPERAND (d, 1);
7704 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7706 if (integer_zerop (bias))
7707 bias = b;
7708 else
7710 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7711 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7712 TREE_TYPE (b), b, bias);
7714 d = TREE_OPERAND (d, 0);
7716 /* For ref build_outer_var_ref already performs this, so
7717 only new_var needs a dereference. */
7718 if (TREE_CODE (d) == INDIRECT_REF)
7720 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7721 gcc_assert (omp_privatize_by_reference (var)
7722 && var == orig_var);
7724 else if (TREE_CODE (d) == ADDR_EXPR)
7726 if (orig_var == var)
7728 new_var = build_fold_addr_expr (new_var);
7729 ref = build_fold_addr_expr (ref);
7732 else
7734 gcc_assert (orig_var == var);
7735 if (omp_privatize_by_reference (var))
7736 ref = build_fold_addr_expr (ref);
7738 if (DECL_P (v))
7740 tree t = maybe_lookup_decl (v, ctx);
7741 if (t)
7742 v = t;
7743 else
7744 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7745 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7747 if (!integer_zerop (bias))
7749 bias = fold_convert_loc (clause_loc, sizetype, bias);
7750 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7751 TREE_TYPE (new_var), new_var,
7752 unshare_expr (bias));
7753 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7754 TREE_TYPE (ref), ref, bias);
7756 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7757 ref = fold_convert_loc (clause_loc, ptype, ref);
7758 tree m = create_tmp_var (ptype);
7759 gimplify_assign (m, new_var, stmt_seqp);
7760 new_var = m;
7761 m = create_tmp_var (ptype);
7762 gimplify_assign (m, ref, stmt_seqp);
7763 ref = m;
7764 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7765 tree body = create_artificial_label (UNKNOWN_LOCATION);
7766 tree end = create_artificial_label (UNKNOWN_LOCATION);
7767 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7768 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7769 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7770 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7772 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7773 tree decl_placeholder
7774 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7775 SET_DECL_VALUE_EXPR (placeholder, out);
7776 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7777 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7778 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7779 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7780 gimple_seq_add_seq (&sub_seq,
7781 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7782 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7783 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7784 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7786 else
7788 tree out2 = out;
7789 tree priv2 = priv;
7790 if (is_truth_op)
7792 tree zero = build_zero_cst (TREE_TYPE (out));
7793 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7794 boolean_type_node, out, zero);
7795 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7796 boolean_type_node, priv, zero);
7798 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7799 if (is_truth_op)
7800 x = fold_convert (TREE_TYPE (out), x);
7801 out = unshare_expr (out);
7802 gimplify_assign (out, x, &sub_seq);
7804 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7805 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7806 gimple_seq_add_stmt (&sub_seq, g);
7807 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7808 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7809 gimple_seq_add_stmt (&sub_seq, g);
7810 g = gimple_build_assign (i, PLUS_EXPR, i,
7811 build_int_cst (TREE_TYPE (i), 1));
7812 gimple_seq_add_stmt (&sub_seq, g);
7813 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7814 gimple_seq_add_stmt (&sub_seq, g);
7815 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7817 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7819 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7821 if (omp_privatize_by_reference (var)
7822 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7823 TREE_TYPE (ref)))
7824 ref = build_fold_addr_expr_loc (clause_loc, ref);
7825 SET_DECL_VALUE_EXPR (placeholder, ref);
7826 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7827 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7828 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7829 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7830 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7832 else
7834 tree new_var2 = new_var;
7835 tree ref2 = ref;
7836 if (is_truth_op)
7838 tree zero = build_zero_cst (TREE_TYPE (new_var));
7839 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7840 boolean_type_node, new_var, zero);
7841 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7842 ref, zero);
7844 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7845 if (is_truth_op)
7846 x = fold_convert (TREE_TYPE (new_var), x);
7847 ref = build_outer_var_ref (var, ctx);
7848 gimplify_assign (ref, x, &sub_seq);
7852 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7854 gimple_seq_add_stmt (stmt_seqp, stmt);
7856 gimple_seq_add_seq (stmt_seqp, sub_seq);
7858 if (clist)
7860 gimple_seq_add_seq (stmt_seqp, *clist);
7861 *clist = NULL;
7864 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7866 gimple_seq_add_stmt (stmt_seqp, stmt);
7870 /* Generate code to implement the COPYPRIVATE clauses. */
7872 static void
7873 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7874 omp_context *ctx)
7876 tree c;
7878 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7880 tree var, new_var, ref, x;
7881 bool by_ref;
7882 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7884 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7885 continue;
7887 var = OMP_CLAUSE_DECL (c);
7888 by_ref = use_pointer_for_field (var, NULL);
7890 ref = build_sender_ref (var, ctx);
7891 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7892 if (by_ref)
7894 x = build_fold_addr_expr_loc (clause_loc, new_var);
7895 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7897 gimplify_assign (ref, x, slist);
7899 ref = build_receiver_ref (var, false, ctx);
7900 if (by_ref)
7902 ref = fold_convert_loc (clause_loc,
7903 build_pointer_type (TREE_TYPE (new_var)),
7904 ref);
7905 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7907 if (omp_privatize_by_reference (var))
7909 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7910 ref = build_simple_mem_ref_loc (clause_loc, ref);
7911 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7913 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7914 gimplify_and_add (x, rlist);
7919 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7920 and REDUCTION from the sender (aka parent) side. */
7922 static void
7923 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7924 omp_context *ctx)
7926 tree c, t;
7927 int ignored_looptemp = 0;
7928 bool is_taskloop = false;
7930 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7931 by GOMP_taskloop. */
7932 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7934 ignored_looptemp = 2;
7935 is_taskloop = true;
7938 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7940 tree val, ref, x, var;
7941 bool by_ref, do_in = false, do_out = false;
7942 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7944 switch (OMP_CLAUSE_CODE (c))
7946 case OMP_CLAUSE_PRIVATE:
7947 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7948 break;
7949 continue;
7950 case OMP_CLAUSE_FIRSTPRIVATE:
7951 case OMP_CLAUSE_COPYIN:
7952 case OMP_CLAUSE_LASTPRIVATE:
7953 case OMP_CLAUSE_IN_REDUCTION:
7954 case OMP_CLAUSE__REDUCTEMP_:
7955 break;
7956 case OMP_CLAUSE_REDUCTION:
7957 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7958 continue;
7959 break;
7960 case OMP_CLAUSE_SHARED:
7961 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7962 break;
7963 continue;
7964 case OMP_CLAUSE__LOOPTEMP_:
7965 if (ignored_looptemp)
7967 ignored_looptemp--;
7968 continue;
7970 break;
7971 default:
7972 continue;
7975 val = OMP_CLAUSE_DECL (c);
7976 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7977 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7978 && TREE_CODE (val) == MEM_REF)
7980 val = TREE_OPERAND (val, 0);
7981 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7982 val = TREE_OPERAND (val, 0);
7983 if (TREE_CODE (val) == INDIRECT_REF
7984 || TREE_CODE (val) == ADDR_EXPR)
7985 val = TREE_OPERAND (val, 0);
7986 if (is_variable_sized (val))
7987 continue;
7990 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7991 outer taskloop region. */
7992 omp_context *ctx_for_o = ctx;
7993 if (is_taskloop
7994 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7995 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7996 ctx_for_o = ctx->outer;
7998 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
8000 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
8001 && is_global_var (var)
8002 && (val == OMP_CLAUSE_DECL (c)
8003 || !is_task_ctx (ctx)
8004 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
8005 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
8006 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
8007 != POINTER_TYPE)))))
8008 continue;
8010 t = omp_member_access_dummy_var (var);
8011 if (t)
8013 var = DECL_VALUE_EXPR (var);
8014 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
8015 if (o != t)
8016 var = unshare_and_remap (var, t, o);
8017 else
8018 var = unshare_expr (var);
8021 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
8023 /* Handle taskloop firstprivate/lastprivate, where the
8024 lastprivate on GIMPLE_OMP_TASK is represented as
8025 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8026 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
8027 x = omp_build_component_ref (ctx->sender_decl, f);
8028 if (use_pointer_for_field (val, ctx))
8029 var = build_fold_addr_expr (var);
8030 gimplify_assign (x, var, ilist);
8031 DECL_ABSTRACT_ORIGIN (f) = NULL;
8032 continue;
8035 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8036 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
8037 || val == OMP_CLAUSE_DECL (c))
8038 && is_variable_sized (val))
8039 continue;
8040 by_ref = use_pointer_for_field (val, NULL);
8042 switch (OMP_CLAUSE_CODE (c))
8044 case OMP_CLAUSE_FIRSTPRIVATE:
8045 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
8046 && !by_ref
8047 && is_task_ctx (ctx))
8048 suppress_warning (var);
8049 do_in = true;
8050 break;
8052 case OMP_CLAUSE_PRIVATE:
8053 case OMP_CLAUSE_COPYIN:
8054 case OMP_CLAUSE__LOOPTEMP_:
8055 case OMP_CLAUSE__REDUCTEMP_:
8056 do_in = true;
8057 break;
8059 case OMP_CLAUSE_LASTPRIVATE:
8060 if (by_ref || omp_privatize_by_reference (val))
8062 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8063 continue;
8064 do_in = true;
8066 else
8068 do_out = true;
8069 if (lang_hooks.decls.omp_private_outer_ref (val))
8070 do_in = true;
8072 break;
8074 case OMP_CLAUSE_REDUCTION:
8075 case OMP_CLAUSE_IN_REDUCTION:
8076 do_in = true;
8077 if (val == OMP_CLAUSE_DECL (c))
8079 if (is_task_ctx (ctx))
8080 by_ref = use_pointer_for_field (val, ctx);
8081 else
8082 do_out = !(by_ref || omp_privatize_by_reference (val));
8084 else
8085 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
8086 break;
8088 default:
8089 gcc_unreachable ();
8092 if (do_in)
8094 ref = build_sender_ref (val, ctx);
8095 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
8096 gimplify_assign (ref, x, ilist);
8097 if (is_task_ctx (ctx))
8098 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
8101 if (do_out)
8103 ref = build_sender_ref (val, ctx);
8104 gimplify_assign (var, ref, olist);
8109 /* Generate code to implement SHARED from the sender (aka parent)
8110 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8111 list things that got automatically shared. */
8113 static void
8114 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
8116 tree var, ovar, nvar, t, f, x, record_type;
8118 if (ctx->record_type == NULL)
8119 return;
8121 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
8122 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8124 ovar = DECL_ABSTRACT_ORIGIN (f);
8125 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
8126 continue;
8128 nvar = maybe_lookup_decl (ovar, ctx);
8129 if (!nvar
8130 || !DECL_HAS_VALUE_EXPR_P (nvar)
8131 || (ctx->allocate_map
8132 && ctx->allocate_map->get (ovar)))
8133 continue;
8135 /* If CTX is a nested parallel directive. Find the immediately
8136 enclosing parallel or workshare construct that contains a
8137 mapping for OVAR. */
8138 var = lookup_decl_in_outer_ctx (ovar, ctx);
8140 t = omp_member_access_dummy_var (var);
8141 if (t)
8143 var = DECL_VALUE_EXPR (var);
8144 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
8145 if (o != t)
8146 var = unshare_and_remap (var, t, o);
8147 else
8148 var = unshare_expr (var);
8151 if (use_pointer_for_field (ovar, ctx))
8153 x = build_sender_ref (ovar, ctx);
8154 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
8155 && TREE_TYPE (f) == TREE_TYPE (ovar))
8157 gcc_assert (is_parallel_ctx (ctx)
8158 && DECL_ARTIFICIAL (ovar));
8159 /* _condtemp_ clause. */
8160 var = build_constructor (TREE_TYPE (x), NULL);
8162 else
8163 var = build_fold_addr_expr (var);
8164 gimplify_assign (x, var, ilist);
8166 else
8168 x = build_sender_ref (ovar, ctx);
8169 gimplify_assign (x, var, ilist);
8171 if (!TREE_READONLY (var)
8172 /* We don't need to receive a new reference to a result
8173 or parm decl. In fact we may not store to it as we will
8174 invalidate any pending RSO and generate wrong gimple
8175 during inlining. */
8176 && !((TREE_CODE (var) == RESULT_DECL
8177 || TREE_CODE (var) == PARM_DECL)
8178 && DECL_BY_REFERENCE (var)))
8180 x = build_sender_ref (ovar, ctx);
8181 gimplify_assign (var, x, olist);
8187 /* Emit an OpenACC head marker call, encapulating the partitioning and
8188 other information that must be processed by the target compiler.
8189 Return the maximum number of dimensions the associated loop might
8190 be partitioned over. */
8192 static unsigned
8193 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
8194 gimple_seq *seq, omp_context *ctx)
8196 unsigned levels = 0;
8197 unsigned tag = 0;
8198 tree gang_static = NULL_TREE;
8199 auto_vec<tree, 5> args;
8201 args.quick_push (build_int_cst
8202 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
8203 args.quick_push (ddvar);
8204 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8206 switch (OMP_CLAUSE_CODE (c))
8208 case OMP_CLAUSE_GANG:
8209 tag |= OLF_DIM_GANG;
8210 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
8211 /* static:* is represented by -1, and we can ignore it, as
8212 scheduling is always static. */
8213 if (gang_static && integer_minus_onep (gang_static))
8214 gang_static = NULL_TREE;
8215 levels++;
8216 break;
8218 case OMP_CLAUSE_WORKER:
8219 tag |= OLF_DIM_WORKER;
8220 levels++;
8221 break;
8223 case OMP_CLAUSE_VECTOR:
8224 tag |= OLF_DIM_VECTOR;
8225 levels++;
8226 break;
8228 case OMP_CLAUSE_SEQ:
8229 tag |= OLF_SEQ;
8230 break;
8232 case OMP_CLAUSE_AUTO:
8233 tag |= OLF_AUTO;
8234 break;
8236 case OMP_CLAUSE_INDEPENDENT:
8237 tag |= OLF_INDEPENDENT;
8238 break;
8240 case OMP_CLAUSE_TILE:
8241 tag |= OLF_TILE;
8242 break;
8244 default:
8245 continue;
8249 if (gang_static)
8251 if (DECL_P (gang_static))
8252 gang_static = build_outer_var_ref (gang_static, ctx);
8253 tag |= OLF_GANG_STATIC;
8256 omp_context *tgt = enclosing_target_ctx (ctx);
8257 if (!tgt || is_oacc_parallel_or_serial (tgt))
8259 else if (is_oacc_kernels (tgt))
8260 /* Not using this loops handling inside OpenACC 'kernels' regions. */
8261 gcc_unreachable ();
8262 else if (is_oacc_kernels_decomposed_part (tgt))
8264 else
8265 gcc_unreachable ();
8267 /* In a parallel region, loops are implicitly INDEPENDENT. */
8268 if (!tgt || is_oacc_parallel_or_serial (tgt))
8269 tag |= OLF_INDEPENDENT;
8271 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8272 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8273 if (tgt && is_oacc_kernels_decomposed_part (tgt))
8275 gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
8276 gcc_assert (!(tag & OLF_AUTO));
8279 if (tag & OLF_TILE)
8280 /* Tiling could use all 3 levels. */
8281 levels = 3;
8282 else
8284 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8285 Ensure at least one level, or 2 for possible auto
8286 partitioning */
8287 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
8288 << OLF_DIM_BASE) | OLF_SEQ));
8290 if (levels < 1u + maybe_auto)
8291 levels = 1u + maybe_auto;
8294 args.quick_push (build_int_cst (integer_type_node, levels));
8295 args.quick_push (build_int_cst (integer_type_node, tag));
8296 if (gang_static)
8297 args.quick_push (gang_static);
8299 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
8300 gimple_set_location (call, loc);
8301 gimple_set_lhs (call, ddvar);
8302 gimple_seq_add_stmt (seq, call);
8304 return levels;
8307 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
8308 partitioning level of the enclosed region. */
8310 static void
8311 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8312 tree tofollow, gimple_seq *seq)
8314 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8315 : IFN_UNIQUE_OACC_TAIL_MARK);
8316 tree marker = build_int_cst (integer_type_node, marker_kind);
8317 int nargs = 2 + (tofollow != NULL_TREE);
8318 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8319 marker, ddvar, tofollow);
8320 gimple_set_location (call, loc);
8321 gimple_set_lhs (call, ddvar);
8322 gimple_seq_add_stmt (seq, call);
8325 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8326 the loop clauses, from which we extract reductions. Initialize
8327 HEAD and TAIL. */
8329 static void
8330 lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
8331 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
8333 bool inner = false;
8334 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
8335 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
8337 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
8339 if (private_marker)
8341 gimple_set_location (private_marker, loc);
8342 gimple_call_set_lhs (private_marker, ddvar);
8343 gimple_call_set_arg (private_marker, 1, ddvar);
8346 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
8347 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
8349 gcc_assert (count);
8350 for (unsigned done = 1; count; count--, done++)
8352 gimple_seq fork_seq = NULL;
8353 gimple_seq join_seq = NULL;
8355 tree place = build_int_cst (integer_type_node, -1);
8356 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
8357 fork_kind, ddvar, place);
8358 gimple_set_location (fork, loc);
8359 gimple_set_lhs (fork, ddvar);
8361 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
8362 join_kind, ddvar, place);
8363 gimple_set_location (join, loc);
8364 gimple_set_lhs (join, ddvar);
8366 /* Mark the beginning of this level sequence. */
8367 if (inner)
8368 lower_oacc_loop_marker (loc, ddvar, true,
8369 build_int_cst (integer_type_node, count),
8370 &fork_seq);
8371 lower_oacc_loop_marker (loc, ddvar, false,
8372 build_int_cst (integer_type_node, done),
8373 &join_seq);
8375 lower_oacc_reductions (loc, clauses, place, inner,
8376 fork, (count == 1) ? private_marker : NULL,
8377 join, &fork_seq, &join_seq, ctx);
8379 /* Append this level to head. */
8380 gimple_seq_add_seq (head, fork_seq);
8381 /* Prepend it to tail. */
8382 gimple_seq_add_seq (&join_seq, *tail);
8383 *tail = join_seq;
8385 inner = true;
8388 /* Mark the end of the sequence. */
8389 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
8390 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
8393 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8394 catch handler and return it. This prevents programs from violating the
8395 structured block semantics with throws. */
8397 static gimple_seq
8398 maybe_catch_exception (gimple_seq body)
8400 gimple *g;
8401 tree decl;
8403 if (!flag_exceptions)
8404 return body;
8406 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8407 decl = lang_hooks.eh_protect_cleanup_actions ();
8408 else
8409 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8411 g = gimple_build_eh_must_not_throw (decl);
8412 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8413 GIMPLE_TRY_CATCH);
8415 return gimple_seq_alloc_with_stmt (g);
8419 /* Routines to lower OMP directives into OMP-GIMPLE. */
8421 /* If ctx is a worksharing context inside of a cancellable parallel
8422 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8423 and conditional branch to parallel's cancel_label to handle
8424 cancellation in the implicit barrier. */
8426 static void
8427 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8428 gimple_seq *body)
8430 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8431 if (gimple_omp_return_nowait_p (omp_return))
8432 return;
8433 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8434 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8435 && outer->cancellable)
8437 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8438 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8439 tree lhs = create_tmp_var (c_bool_type);
8440 gimple_omp_return_set_lhs (omp_return, lhs);
8441 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8442 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8443 fold_convert (c_bool_type,
8444 boolean_false_node),
8445 outer->cancel_label, fallthru_label);
8446 gimple_seq_add_stmt (body, g);
8447 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8449 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
8450 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
8451 return;
8454 /* Find the first task_reduction or reduction clause or return NULL
8455 if there are none. */
8457 static inline tree
8458 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8459 enum omp_clause_code ccode)
8461 while (1)
8463 clauses = omp_find_clause (clauses, ccode);
8464 if (clauses == NULL_TREE)
8465 return NULL_TREE;
8466 if (ccode != OMP_CLAUSE_REDUCTION
8467 || code == OMP_TASKLOOP
8468 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8469 return clauses;
8470 clauses = OMP_CLAUSE_CHAIN (clauses);
8474 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8475 gimple_seq *, gimple_seq *);
8477 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8478 CTX is the enclosing OMP context for the current statement. */
8480 static void
8481 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8483 tree block, control;
8484 gimple_stmt_iterator tgsi;
8485 gomp_sections *stmt;
8486 gimple *t;
8487 gbind *new_stmt, *bind;
8488 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8490 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
8492 push_gimplify_context ();
8494 dlist = NULL;
8495 ilist = NULL;
8497 tree rclauses
8498 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
8499 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
8500 tree rtmp = NULL_TREE;
8501 if (rclauses)
8503 tree type = build_pointer_type (pointer_sized_int_node);
8504 tree temp = create_tmp_var (type);
8505 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8506 OMP_CLAUSE_DECL (c) = temp;
8507 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
8508 gimple_omp_sections_set_clauses (stmt, c);
8509 lower_omp_task_reductions (ctx, OMP_SECTIONS,
8510 gimple_omp_sections_clauses (stmt),
8511 &ilist, &tred_dlist);
8512 rclauses = c;
8513 rtmp = make_ssa_name (type);
8514 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
8517 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
8518 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
8520 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
8521 &ilist, &dlist, ctx, NULL);
8523 control = create_tmp_var (unsigned_type_node, ".section");
8524 gimple_omp_sections_set_control (stmt, control);
8526 new_body = gimple_omp_body (stmt);
8527 gimple_omp_set_body (stmt, NULL);
8528 tgsi = gsi_start (new_body);
8529 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
8531 omp_context *sctx;
8532 gimple *sec_start;
8534 sec_start = gsi_stmt (tgsi);
8535 sctx = maybe_lookup_ctx (sec_start);
8536 gcc_assert (sctx);
8538 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8539 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8540 GSI_CONTINUE_LINKING);
8541 gimple_omp_set_body (sec_start, NULL);
8543 if (gsi_one_before_end_p (tgsi))
8545 gimple_seq l = NULL;
8546 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8547 &ilist, &l, &clist, ctx);
8548 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8549 gimple_omp_section_set_last (sec_start);
8552 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8553 GSI_CONTINUE_LINKING);
8556 block = make_node (BLOCK);
8557 bind = gimple_build_bind (NULL, new_body, block);
8559 olist = NULL;
8560 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8561 &clist, ctx);
8562 if (clist)
8564 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8565 gcall *g = gimple_build_call (fndecl, 0);
8566 gimple_seq_add_stmt (&olist, g);
8567 gimple_seq_add_seq (&olist, clist);
8568 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8569 g = gimple_build_call (fndecl, 0);
8570 gimple_seq_add_stmt (&olist, g);
8573 block = make_node (BLOCK);
8574 new_stmt = gimple_build_bind (NULL, NULL, block);
8575 gsi_replace (gsi_p, new_stmt, true);
8577 pop_gimplify_context (new_stmt);
8578 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8579 BLOCK_VARS (block) = gimple_bind_vars (bind);
8580 if (BLOCK_VARS (block))
8581 TREE_USED (block) = 1;
8583 new_body = NULL;
8584 gimple_seq_add_seq (&new_body, ilist);
8585 gimple_seq_add_stmt (&new_body, stmt);
8586 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8587 gimple_seq_add_stmt (&new_body, bind);
8589 t = gimple_build_omp_continue (control, control);
8590 gimple_seq_add_stmt (&new_body, t);
8592 gimple_seq_add_seq (&new_body, olist);
8593 if (ctx->cancellable)
8594 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8595 gimple_seq_add_seq (&new_body, dlist);
8597 new_body = maybe_catch_exception (new_body);
8599 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8600 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8601 t = gimple_build_omp_return (nowait);
8602 gimple_seq_add_stmt (&new_body, t);
8603 gimple_seq_add_seq (&new_body, tred_dlist);
8604 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
8606 if (rclauses)
8607 OMP_CLAUSE_DECL (rclauses) = rtmp;
8609 gimple_bind_set_body (new_stmt, new_body);
8613 /* A subroutine of lower_omp_single. Expand the simple form of
8614 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8616 if (GOMP_single_start ())
8617 BODY;
8618 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8620 FIXME. It may be better to delay expanding the logic of this until
8621 pass_expand_omp. The expanded logic may make the job more difficult
8622 to a synchronization analysis pass. */
8624 static void
8625 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8627 location_t loc = gimple_location (single_stmt);
8628 tree tlabel = create_artificial_label (loc);
8629 tree flabel = create_artificial_label (loc);
8630 gimple *call, *cond;
8631 tree lhs, decl;
8633 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8634 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8635 call = gimple_build_call (decl, 0);
8636 gimple_call_set_lhs (call, lhs);
8637 gimple_seq_add_stmt (pre_p, call);
8639 cond = gimple_build_cond (EQ_EXPR, lhs,
8640 fold_convert_loc (loc, TREE_TYPE (lhs),
8641 boolean_true_node),
8642 tlabel, flabel);
8643 gimple_seq_add_stmt (pre_p, cond);
8644 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8645 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8646 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8650 /* A subroutine of lower_omp_single. Expand the simple form of
8651 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8653 #pragma omp single copyprivate (a, b, c)
8655 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8658 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8660 BODY;
8661 copyout.a = a;
8662 copyout.b = b;
8663 copyout.c = c;
8664 GOMP_single_copy_end (&copyout);
8666 else
8668 a = copyout_p->a;
8669 b = copyout_p->b;
8670 c = copyout_p->c;
8672 GOMP_barrier ();
8675 FIXME. It may be better to delay expanding the logic of this until
8676 pass_expand_omp. The expanded logic may make the job more difficult
8677 to a synchronization analysis pass. */
8679 static void
8680 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8681 omp_context *ctx)
8683 tree ptr_type, t, l0, l1, l2, bfn_decl;
8684 gimple_seq copyin_seq;
8685 location_t loc = gimple_location (single_stmt);
8687 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8689 ptr_type = build_pointer_type (ctx->record_type);
8690 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8692 l0 = create_artificial_label (loc);
8693 l1 = create_artificial_label (loc);
8694 l2 = create_artificial_label (loc);
8696 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8697 t = build_call_expr_loc (loc, bfn_decl, 0);
8698 t = fold_convert_loc (loc, ptr_type, t);
8699 gimplify_assign (ctx->receiver_decl, t, pre_p);
8701 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8702 build_int_cst (ptr_type, 0));
8703 t = build3 (COND_EXPR, void_type_node, t,
8704 build_and_jump (&l0), build_and_jump (&l1));
8705 gimplify_and_add (t, pre_p);
8707 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8709 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8711 copyin_seq = NULL;
8712 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8713 &copyin_seq, ctx);
8715 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8716 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8717 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8718 gimplify_and_add (t, pre_p);
8720 t = build_and_jump (&l2);
8721 gimplify_and_add (t, pre_p);
8723 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8725 gimple_seq_add_seq (pre_p, copyin_seq);
8727 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8731 /* Expand code for an OpenMP single directive. */
8733 static void
8734 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8736 tree block;
8737 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8738 gbind *bind;
8739 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8741 push_gimplify_context ();
8743 block = make_node (BLOCK);
8744 bind = gimple_build_bind (NULL, NULL, block);
8745 gsi_replace (gsi_p, bind, true);
8746 bind_body = NULL;
8747 dlist = NULL;
8748 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8749 &bind_body, &dlist, ctx, NULL);
8750 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8752 gimple_seq_add_stmt (&bind_body, single_stmt);
8754 if (ctx->record_type)
8755 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8756 else
8757 lower_omp_single_simple (single_stmt, &bind_body);
8759 gimple_omp_set_body (single_stmt, NULL);
8761 gimple_seq_add_seq (&bind_body, dlist);
8763 bind_body = maybe_catch_exception (bind_body);
8765 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8766 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8767 gimple *g = gimple_build_omp_return (nowait);
8768 gimple_seq_add_stmt (&bind_body_tail, g);
8769 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8770 if (ctx->record_type)
8772 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8773 tree clobber = build_clobber (ctx->record_type);
8774 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8775 clobber), GSI_SAME_STMT);
8777 gimple_seq_add_seq (&bind_body, bind_body_tail);
8778 gimple_bind_set_body (bind, bind_body);
8780 pop_gimplify_context (bind);
8782 gimple_bind_append_vars (bind, ctx->block_vars);
8783 BLOCK_VARS (block) = ctx->block_vars;
8784 if (BLOCK_VARS (block))
8785 TREE_USED (block) = 1;
8789 /* Lower code for an OMP scope directive. */
8791 static void
8792 lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8794 tree block;
8795 gimple *scope_stmt = gsi_stmt (*gsi_p);
8796 gbind *bind;
8797 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8798 gimple_seq tred_dlist = NULL;
8800 push_gimplify_context ();
8802 block = make_node (BLOCK);
8803 bind = gimple_build_bind (NULL, NULL, block);
8804 gsi_replace (gsi_p, bind, true);
8805 bind_body = NULL;
8806 dlist = NULL;
8808 tree rclauses
8809 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
8810 OMP_SCOPE, OMP_CLAUSE_REDUCTION);
8811 if (rclauses)
8813 tree type = build_pointer_type (pointer_sized_int_node);
8814 tree temp = create_tmp_var (type);
8815 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8816 OMP_CLAUSE_DECL (c) = temp;
8817 OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
8818 gimple_omp_scope_set_clauses (scope_stmt, c);
8819 lower_omp_task_reductions (ctx, OMP_SCOPE,
8820 gimple_omp_scope_clauses (scope_stmt),
8821 &bind_body, &tred_dlist);
8822 rclauses = c;
8823 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
8824 gimple *stmt = gimple_build_call (fndecl, 1, temp);
8825 gimple_seq_add_stmt (&bind_body, stmt);
8828 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
8829 &bind_body, &dlist, ctx, NULL);
8830 lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);
8832 gimple_seq_add_stmt (&bind_body, scope_stmt);
8834 gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));
8836 gimple_omp_set_body (scope_stmt, NULL);
8838 gimple_seq clist = NULL;
8839 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
8840 &bind_body, &clist, ctx);
8841 if (clist)
8843 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8844 gcall *g = gimple_build_call (fndecl, 0);
8845 gimple_seq_add_stmt (&bind_body, g);
8846 gimple_seq_add_seq (&bind_body, clist);
8847 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8848 g = gimple_build_call (fndecl, 0);
8849 gimple_seq_add_stmt (&bind_body, g);
8852 gimple_seq_add_seq (&bind_body, dlist);
8854 bind_body = maybe_catch_exception (bind_body);
8856 bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
8857 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8858 gimple *g = gimple_build_omp_return (nowait);
8859 gimple_seq_add_stmt (&bind_body_tail, g);
8860 gimple_seq_add_seq (&bind_body_tail, tred_dlist);
8861 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8862 if (ctx->record_type)
8864 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8865 tree clobber = build_clobber (ctx->record_type);
8866 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8867 clobber), GSI_SAME_STMT);
8869 gimple_seq_add_seq (&bind_body, bind_body_tail);
8871 gimple_bind_set_body (bind, bind_body);
8873 pop_gimplify_context (bind);
8875 gimple_bind_append_vars (bind, ctx->block_vars);
8876 BLOCK_VARS (block) = ctx->block_vars;
8877 if (BLOCK_VARS (block))
8878 TREE_USED (block) = 1;
8880 /* Expand code for an OpenMP master or masked directive. */
8882 static void
8883 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8885 tree block, lab = NULL, x, bfn_decl;
8886 gimple *stmt = gsi_stmt (*gsi_p);
8887 gbind *bind;
8888 location_t loc = gimple_location (stmt);
8889 gimple_seq tseq;
8890 tree filter = integer_zero_node;
8892 push_gimplify_context ();
8894 if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
8896 filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
8897 OMP_CLAUSE_FILTER);
8898 if (filter)
8899 filter = fold_convert (integer_type_node,
8900 OMP_CLAUSE_FILTER_EXPR (filter));
8901 else
8902 filter = integer_zero_node;
8904 block = make_node (BLOCK);
8905 bind = gimple_build_bind (NULL, NULL, block);
8906 gsi_replace (gsi_p, bind, true);
8907 gimple_bind_add_stmt (bind, stmt);
8909 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8910 x = build_call_expr_loc (loc, bfn_decl, 0);
8911 x = build2 (EQ_EXPR, boolean_type_node, x, filter);
8912 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8913 tseq = NULL;
8914 gimplify_and_add (x, &tseq);
8915 gimple_bind_add_seq (bind, tseq);
8917 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8918 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8919 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8920 gimple_omp_set_body (stmt, NULL);
8922 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8924 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8926 pop_gimplify_context (bind);
8928 gimple_bind_append_vars (bind, ctx->block_vars);
8929 BLOCK_VARS (block) = ctx->block_vars;
8932 /* Helper function for lower_omp_task_reductions. For a specific PASS
8933 find out the current clause it should be processed, or return false
8934 if all have been processed already. */
8936 static inline bool
8937 omp_task_reduction_iterate (int pass, enum tree_code code,
8938 enum omp_clause_code ccode, tree *c, tree *decl,
8939 tree *type, tree *next)
8941 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8943 if (ccode == OMP_CLAUSE_REDUCTION
8944 && code != OMP_TASKLOOP
8945 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8946 continue;
8947 *decl = OMP_CLAUSE_DECL (*c);
8948 *type = TREE_TYPE (*decl);
8949 if (TREE_CODE (*decl) == MEM_REF)
8951 if (pass != 1)
8952 continue;
8954 else
8956 if (omp_privatize_by_reference (*decl))
8957 *type = TREE_TYPE (*type);
8958 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8959 continue;
8961 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8962 return true;
8964 *decl = NULL_TREE;
8965 *type = NULL_TREE;
8966 *next = NULL_TREE;
8967 return false;
8970 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8971 OMP_TASKGROUP only with task modifier). Register mapping of those in
8972 START sequence and reducing them and unregister them in the END sequence. */
8974 static void
8975 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8976 gimple_seq *start, gimple_seq *end)
8978 enum omp_clause_code ccode
8979 = (code == OMP_TASKGROUP
8980 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8981 tree cancellable = NULL_TREE;
8982 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8983 if (clauses == NULL_TREE)
8984 return;
8985 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
8987 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8988 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8989 && outer->cancellable)
8991 cancellable = error_mark_node;
8992 break;
8994 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
8995 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
8996 break;
8998 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8999 tree *last = &TYPE_FIELDS (record_type);
9000 unsigned cnt = 0;
9001 if (cancellable)
9003 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9004 ptr_type_node);
9005 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9006 integer_type_node);
9007 *last = field;
9008 DECL_CHAIN (field) = ifield;
9009 last = &DECL_CHAIN (ifield);
9010 DECL_CONTEXT (field) = record_type;
9011 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9012 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9013 DECL_CONTEXT (ifield) = record_type;
9014 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
9015 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
9017 for (int pass = 0; pass < 2; pass++)
9019 tree decl, type, next;
9020 for (tree c = clauses;
9021 omp_task_reduction_iterate (pass, code, ccode,
9022 &c, &decl, &type, &next); c = next)
9024 ++cnt;
9025 tree new_type = type;
9026 if (ctx->outer)
9027 new_type = remap_type (type, &ctx->outer->cb);
9028 tree field
9029 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
9030 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
9031 new_type);
9032 if (DECL_P (decl) && type == TREE_TYPE (decl))
9034 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
9035 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
9036 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
9038 else
9039 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
9040 DECL_CONTEXT (field) = record_type;
9041 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9042 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9043 *last = field;
9044 last = &DECL_CHAIN (field);
9045 tree bfield
9046 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
9047 boolean_type_node);
9048 DECL_CONTEXT (bfield) = record_type;
9049 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
9050 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
9051 *last = bfield;
9052 last = &DECL_CHAIN (bfield);
9055 *last = NULL_TREE;
9056 layout_type (record_type);
9058 /* Build up an array which registers with the runtime all the reductions
9059 and deregisters them at the end. Format documented in libgomp/task.c. */
9060 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
9061 tree avar = create_tmp_var_raw (atype);
9062 gimple_add_tmp_var (avar);
9063 TREE_ADDRESSABLE (avar) = 1;
9064 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
9065 NULL_TREE, NULL_TREE);
9066 tree t = build_int_cst (pointer_sized_int_node, cnt);
9067 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9068 gimple_seq seq = NULL;
9069 tree sz = fold_convert (pointer_sized_int_node,
9070 TYPE_SIZE_UNIT (record_type));
9071 int cachesz = 64;
9072 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
9073 build_int_cst (pointer_sized_int_node, cachesz - 1));
9074 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
9075 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
9076 ctx->task_reductions.create (1 + cnt);
9077 ctx->task_reduction_map = new hash_map<tree, unsigned>;
9078 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
9079 ? sz : NULL_TREE);
9080 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
9081 gimple_seq_add_seq (start, seq);
9082 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
9083 NULL_TREE, NULL_TREE);
9084 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
9085 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9086 NULL_TREE, NULL_TREE);
9087 t = build_int_cst (pointer_sized_int_node,
9088 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
9089 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9090 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
9091 NULL_TREE, NULL_TREE);
9092 t = build_int_cst (pointer_sized_int_node, -1);
9093 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9094 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
9095 NULL_TREE, NULL_TREE);
9096 t = build_int_cst (pointer_sized_int_node, 0);
9097 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9099 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
9100 and for each task reduction checks a bool right after the private variable
9101 within that thread's chunk; if the bool is clear, it hasn't been
9102 initialized and thus isn't going to be reduced nor destructed, otherwise
9103 reduce and destruct it. */
9104 tree idx = create_tmp_var (size_type_node);
9105 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
9106 tree num_thr_sz = create_tmp_var (size_type_node);
9107 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9108 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9109 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
9110 gimple *g;
9111 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9113 /* For worksharing constructs or scope, only perform it in the master
9114 thread, with the exception of cancelled implicit barriers - then only
9115 handle the current thread. */
9116 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9117 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9118 tree thr_num = create_tmp_var (integer_type_node);
9119 g = gimple_build_call (t, 0);
9120 gimple_call_set_lhs (g, thr_num);
9121 gimple_seq_add_stmt (end, g);
9122 if (cancellable)
9124 tree c;
9125 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9126 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9127 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9128 if (code == OMP_FOR)
9129 c = gimple_omp_for_clauses (ctx->stmt);
9130 else if (code == OMP_SECTIONS)
9131 c = gimple_omp_sections_clauses (ctx->stmt);
9132 else /* if (code == OMP_SCOPE) */
9133 c = gimple_omp_scope_clauses (ctx->stmt);
9134 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
9135 cancellable = c;
9136 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
9137 lab5, lab6);
9138 gimple_seq_add_stmt (end, g);
9139 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9140 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
9141 gimple_seq_add_stmt (end, g);
9142 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
9143 build_one_cst (TREE_TYPE (idx)));
9144 gimple_seq_add_stmt (end, g);
9145 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
9146 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9148 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
9149 gimple_seq_add_stmt (end, g);
9150 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9152 if (code != OMP_PARALLEL)
9154 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9155 tree num_thr = create_tmp_var (integer_type_node);
9156 g = gimple_build_call (t, 0);
9157 gimple_call_set_lhs (g, num_thr);
9158 gimple_seq_add_stmt (end, g);
9159 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
9160 gimple_seq_add_stmt (end, g);
9161 if (cancellable)
9162 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9164 else
9166 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
9167 OMP_CLAUSE__REDUCTEMP_);
9168 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
9169 t = fold_convert (size_type_node, t);
9170 gimplify_assign (num_thr_sz, t, end);
9172 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9173 NULL_TREE, NULL_TREE);
9174 tree data = create_tmp_var (pointer_sized_int_node);
9175 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
9176 if (code == OMP_TASKLOOP)
9178 lab7 = create_artificial_label (UNKNOWN_LOCATION);
9179 g = gimple_build_cond (NE_EXPR, data,
9180 build_zero_cst (pointer_sized_int_node),
9181 lab1, lab7);
9182 gimple_seq_add_stmt (end, g);
9184 gimple_seq_add_stmt (end, gimple_build_label (lab1));
9185 tree ptr;
9186 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
9187 ptr = create_tmp_var (build_pointer_type (record_type));
9188 else
9189 ptr = create_tmp_var (ptr_type_node);
9190 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
9192 tree field = TYPE_FIELDS (record_type);
9193 cnt = 0;
9194 if (cancellable)
9195 field = DECL_CHAIN (DECL_CHAIN (field));
9196 for (int pass = 0; pass < 2; pass++)
9198 tree decl, type, next;
9199 for (tree c = clauses;
9200 omp_task_reduction_iterate (pass, code, ccode,
9201 &c, &decl, &type, &next); c = next)
9203 tree var = decl, ref;
9204 if (TREE_CODE (decl) == MEM_REF)
9206 var = TREE_OPERAND (var, 0);
9207 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
9208 var = TREE_OPERAND (var, 0);
9209 tree v = var;
9210 if (TREE_CODE (var) == ADDR_EXPR)
9211 var = TREE_OPERAND (var, 0);
9212 else if (TREE_CODE (var) == INDIRECT_REF)
9213 var = TREE_OPERAND (var, 0);
9214 tree orig_var = var;
9215 if (is_variable_sized (var))
9217 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
9218 var = DECL_VALUE_EXPR (var);
9219 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
9220 var = TREE_OPERAND (var, 0);
9221 gcc_assert (DECL_P (var));
9223 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9224 if (orig_var != var)
9225 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
9226 else if (TREE_CODE (v) == ADDR_EXPR)
9227 t = build_fold_addr_expr (t);
9228 else if (TREE_CODE (v) == INDIRECT_REF)
9229 t = build_fold_indirect_ref (t);
9230 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
9232 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
9233 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
9234 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
9236 if (!integer_zerop (TREE_OPERAND (decl, 1)))
9237 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
9238 fold_convert (size_type_node,
9239 TREE_OPERAND (decl, 1)));
9241 else
9243 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9244 if (!omp_privatize_by_reference (decl))
9245 t = build_fold_addr_expr (t);
9247 t = fold_convert (pointer_sized_int_node, t);
9248 seq = NULL;
9249 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9250 gimple_seq_add_seq (start, seq);
9251 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9252 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9253 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9254 t = unshare_expr (byte_position (field));
9255 t = fold_convert (pointer_sized_int_node, t);
9256 ctx->task_reduction_map->put (c, cnt);
9257 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
9258 ? t : NULL_TREE);
9259 seq = NULL;
9260 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9261 gimple_seq_add_seq (start, seq);
9262 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9263 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
9264 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9266 tree bfield = DECL_CHAIN (field);
9267 tree cond;
9268 if (code == OMP_PARALLEL
9269 || code == OMP_FOR
9270 || code == OMP_SECTIONS
9271 || code == OMP_SCOPE)
9272 /* In parallel, worksharing or scope all threads unconditionally
9273 initialize all their task reduction private variables. */
9274 cond = boolean_true_node;
9275 else if (TREE_TYPE (ptr) == ptr_type_node)
9277 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9278 unshare_expr (byte_position (bfield)));
9279 seq = NULL;
9280 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
9281 gimple_seq_add_seq (end, seq);
9282 tree pbool = build_pointer_type (TREE_TYPE (bfield));
9283 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
9284 build_int_cst (pbool, 0));
9286 else
9287 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
9288 build_simple_mem_ref (ptr), bfield, NULL_TREE);
9289 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9290 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9291 tree condv = create_tmp_var (boolean_type_node);
9292 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
9293 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
9294 lab3, lab4);
9295 gimple_seq_add_stmt (end, g);
9296 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9297 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
9299 /* If this reduction doesn't need destruction and parallel
9300 has been cancelled, there is nothing to do for this
9301 reduction, so jump around the merge operation. */
9302 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9303 g = gimple_build_cond (NE_EXPR, cancellable,
9304 build_zero_cst (TREE_TYPE (cancellable)),
9305 lab4, lab5);
9306 gimple_seq_add_stmt (end, g);
9307 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9310 tree new_var;
9311 if (TREE_TYPE (ptr) == ptr_type_node)
9313 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9314 unshare_expr (byte_position (field)));
9315 seq = NULL;
9316 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9317 gimple_seq_add_seq (end, seq);
9318 tree pbool = build_pointer_type (TREE_TYPE (field));
9319 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9320 build_int_cst (pbool, 0));
9322 else
9323 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9324 build_simple_mem_ref (ptr), field, NULL_TREE);
9326 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9327 if (TREE_CODE (decl) != MEM_REF
9328 && omp_privatize_by_reference (decl))
9329 ref = build_simple_mem_ref (ref);
9330 /* reduction(-:var) sums up the partial results, so it acts
9331 identically to reduction(+:var). */
9332 if (rcode == MINUS_EXPR)
9333 rcode = PLUS_EXPR;
9334 if (TREE_CODE (decl) == MEM_REF)
9336 tree type = TREE_TYPE (new_var);
9337 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9338 tree i = create_tmp_var (TREE_TYPE (v));
9339 tree ptype = build_pointer_type (TREE_TYPE (type));
9340 if (DECL_P (v))
9342 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9343 tree vv = create_tmp_var (TREE_TYPE (v));
9344 gimplify_assign (vv, v, start);
9345 v = vv;
9347 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9348 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9349 new_var = build_fold_addr_expr (new_var);
9350 new_var = fold_convert (ptype, new_var);
9351 ref = fold_convert (ptype, ref);
9352 tree m = create_tmp_var (ptype);
9353 gimplify_assign (m, new_var, end);
9354 new_var = m;
9355 m = create_tmp_var (ptype);
9356 gimplify_assign (m, ref, end);
9357 ref = m;
9358 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9359 tree body = create_artificial_label (UNKNOWN_LOCATION);
9360 tree endl = create_artificial_label (UNKNOWN_LOCATION);
9361 gimple_seq_add_stmt (end, gimple_build_label (body));
9362 tree priv = build_simple_mem_ref (new_var);
9363 tree out = build_simple_mem_ref (ref);
9364 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9366 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9367 tree decl_placeholder
9368 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9369 tree lab6 = NULL_TREE;
9370 if (cancellable)
9372 /* If this reduction needs destruction and parallel
9373 has been cancelled, jump around the merge operation
9374 to the destruction. */
9375 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9376 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9377 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9378 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9379 lab6, lab5);
9380 gimple_seq_add_stmt (end, g);
9381 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9383 SET_DECL_VALUE_EXPR (placeholder, out);
9384 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9385 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9386 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9387 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9388 gimple_seq_add_seq (end,
9389 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9390 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9391 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9393 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9394 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9396 if (cancellable)
9397 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9398 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9399 if (x)
9401 gimple_seq tseq = NULL;
9402 gimplify_stmt (&x, &tseq);
9403 gimple_seq_add_seq (end, tseq);
9406 else
9408 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9409 out = unshare_expr (out);
9410 gimplify_assign (out, x, end);
9412 gimple *g
9413 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9414 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9415 gimple_seq_add_stmt (end, g);
9416 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9417 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9418 gimple_seq_add_stmt (end, g);
9419 g = gimple_build_assign (i, PLUS_EXPR, i,
9420 build_int_cst (TREE_TYPE (i), 1));
9421 gimple_seq_add_stmt (end, g);
9422 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9423 gimple_seq_add_stmt (end, g);
9424 gimple_seq_add_stmt (end, gimple_build_label (endl));
9426 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9428 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9429 tree oldv = NULL_TREE;
9430 tree lab6 = NULL_TREE;
9431 if (cancellable)
9433 /* If this reduction needs destruction and parallel
9434 has been cancelled, jump around the merge operation
9435 to the destruction. */
9436 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9437 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9438 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9439 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9440 lab6, lab5);
9441 gimple_seq_add_stmt (end, g);
9442 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9444 if (omp_privatize_by_reference (decl)
9445 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9446 TREE_TYPE (ref)))
9447 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9448 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9449 tree refv = create_tmp_var (TREE_TYPE (ref));
9450 gimplify_assign (refv, ref, end);
9451 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9452 SET_DECL_VALUE_EXPR (placeholder, ref);
9453 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9454 tree d = maybe_lookup_decl (decl, ctx);
9455 gcc_assert (d);
9456 if (DECL_HAS_VALUE_EXPR_P (d))
9457 oldv = DECL_VALUE_EXPR (d);
9458 if (omp_privatize_by_reference (var))
9460 tree v = fold_convert (TREE_TYPE (d),
9461 build_fold_addr_expr (new_var));
9462 SET_DECL_VALUE_EXPR (d, v);
9464 else
9465 SET_DECL_VALUE_EXPR (d, new_var);
9466 DECL_HAS_VALUE_EXPR_P (d) = 1;
9467 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9468 if (oldv)
9469 SET_DECL_VALUE_EXPR (d, oldv);
9470 else
9472 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9473 DECL_HAS_VALUE_EXPR_P (d) = 0;
9475 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9476 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9477 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9478 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9479 if (cancellable)
9480 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9481 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9482 if (x)
9484 gimple_seq tseq = NULL;
9485 gimplify_stmt (&x, &tseq);
9486 gimple_seq_add_seq (end, tseq);
9489 else
9491 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9492 ref = unshare_expr (ref);
9493 gimplify_assign (ref, x, end);
9495 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9496 ++cnt;
9497 field = DECL_CHAIN (bfield);
9501 if (code == OMP_TASKGROUP)
9503 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9504 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9505 gimple_seq_add_stmt (start, g);
9507 else
9509 tree c;
9510 if (code == OMP_FOR)
9511 c = gimple_omp_for_clauses (ctx->stmt);
9512 else if (code == OMP_SECTIONS)
9513 c = gimple_omp_sections_clauses (ctx->stmt);
9514 else if (code == OMP_SCOPE)
9515 c = gimple_omp_scope_clauses (ctx->stmt);
9516 else
9517 c = gimple_omp_taskreg_clauses (ctx->stmt);
9518 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9519 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9520 build_fold_addr_expr (avar));
9521 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9524 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9525 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9526 size_one_node));
9527 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9528 gimple_seq_add_stmt (end, g);
9529 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9530 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9532 enum built_in_function bfn
9533 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9534 t = builtin_decl_explicit (bfn);
9535 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9536 tree arg;
9537 if (cancellable)
9539 arg = create_tmp_var (c_bool_type);
9540 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9541 cancellable));
9543 else
9544 arg = build_int_cst (c_bool_type, 0);
9545 g = gimple_build_call (t, 1, arg);
9547 else
9549 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9550 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9552 gimple_seq_add_stmt (end, g);
9553 if (lab7)
9554 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9555 t = build_constructor (atype, NULL);
9556 TREE_THIS_VOLATILE (t) = 1;
9557 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9560 /* Expand code for an OpenMP taskgroup directive. */
9562 static void
9563 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9565 gimple *stmt = gsi_stmt (*gsi_p);
9566 gcall *x;
9567 gbind *bind;
9568 gimple_seq dseq = NULL;
9569 tree block = make_node (BLOCK);
9571 bind = gimple_build_bind (NULL, NULL, block);
9572 gsi_replace (gsi_p, bind, true);
9573 gimple_bind_add_stmt (bind, stmt);
9575 push_gimplify_context ();
9577 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9579 gimple_bind_add_stmt (bind, x);
9581 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9582 gimple_omp_taskgroup_clauses (stmt),
9583 gimple_bind_body_ptr (bind), &dseq);
9585 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9586 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9587 gimple_omp_set_body (stmt, NULL);
9589 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9590 gimple_bind_add_seq (bind, dseq);
9592 pop_gimplify_context (bind);
9594 gimple_bind_append_vars (bind, ctx->block_vars);
9595 BLOCK_VARS (block) = ctx->block_vars;
9599 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9601 static void
9602 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9603 omp_context *ctx)
9605 struct omp_for_data fd;
9606 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9607 return;
9609 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9610 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9611 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9612 if (!fd.ordered)
9613 return;
9615 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9616 tree c = gimple_omp_ordered_clauses (ord_stmt);
9617 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9618 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9620 /* Merge depend clauses from multiple adjacent
9621 #pragma omp ordered depend(sink:...) constructs
9622 into one #pragma omp ordered depend(sink:...), so that
9623 we can optimize them together. */
9624 gimple_stmt_iterator gsi = *gsi_p;
9625 gsi_next (&gsi);
9626 while (!gsi_end_p (gsi))
9628 gimple *stmt = gsi_stmt (gsi);
9629 if (is_gimple_debug (stmt)
9630 || gimple_code (stmt) == GIMPLE_NOP)
9632 gsi_next (&gsi);
9633 continue;
9635 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9636 break;
9637 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9638 c = gimple_omp_ordered_clauses (ord_stmt2);
9639 if (c == NULL_TREE
9640 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
9641 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9642 break;
9643 while (*list_p)
9644 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9645 *list_p = c;
9646 gsi_remove (&gsi, true);
9650 /* Canonicalize sink dependence clauses into one folded clause if
9651 possible.
9653 The basic algorithm is to create a sink vector whose first
9654 element is the GCD of all the first elements, and whose remaining
9655 elements are the minimum of the subsequent columns.
9657 We ignore dependence vectors whose first element is zero because
9658 such dependencies are known to be executed by the same thread.
9660 We take into account the direction of the loop, so a minimum
9661 becomes a maximum if the loop is iterating forwards. We also
9662 ignore sink clauses where the loop direction is unknown, or where
9663 the offsets are clearly invalid because they are not a multiple
9664 of the loop increment.
9666 For example:
9668 #pragma omp for ordered(2)
9669 for (i=0; i < N; ++i)
9670 for (j=0; j < M; ++j)
9672 #pragma omp ordered \
9673 depend(sink:i-8,j-2) \
9674 depend(sink:i,j-1) \ // Completely ignored because i+0.
9675 depend(sink:i-4,j-3) \
9676 depend(sink:i-6,j-4)
9677 #pragma omp ordered depend(source)
9680 Folded clause is:
9682 depend(sink:-gcd(8,4,6),-min(2,3,4))
9683 -or-
9684 depend(sink:-2,-2)
9687 /* FIXME: Computing GCD's where the first element is zero is
9688 non-trivial in the presence of collapsed loops. Do this later. */
9689 if (fd.collapse > 1)
9690 return;
9692 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9694 /* wide_int is not a POD so it must be default-constructed. */
9695 for (unsigned i = 0; i != 2 * len - 1; ++i)
9696 new (static_cast<void*>(folded_deps + i)) wide_int ();
9698 tree folded_dep = NULL_TREE;
9699 /* TRUE if the first dimension's offset is negative. */
9700 bool neg_offset_p = false;
9702 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9703 unsigned int i;
9704 while ((c = *list_p) != NULL)
9706 bool remove = false;
9708 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
9709 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9710 goto next_ordered_clause;
9712 tree vec;
9713 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9714 vec && TREE_CODE (vec) == TREE_LIST;
9715 vec = TREE_CHAIN (vec), ++i)
9717 gcc_assert (i < len);
9719 /* omp_extract_for_data has canonicalized the condition. */
9720 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9721 || fd.loops[i].cond_code == GT_EXPR);
9722 bool forward = fd.loops[i].cond_code == LT_EXPR;
9723 bool maybe_lexically_later = true;
9725 /* While the committee makes up its mind, bail if we have any
9726 non-constant steps. */
9727 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9728 goto lower_omp_ordered_ret;
9730 tree itype = TREE_TYPE (TREE_VALUE (vec));
9731 if (POINTER_TYPE_P (itype))
9732 itype = sizetype;
9733 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9734 TYPE_PRECISION (itype),
9735 TYPE_SIGN (itype));
9737 /* Ignore invalid offsets that are not multiples of the step. */
9738 if (!wi::multiple_of_p (wi::abs (offset),
9739 wi::abs (wi::to_wide (fd.loops[i].step)),
9740 UNSIGNED))
9742 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9743 "ignoring sink clause with offset that is not "
9744 "a multiple of the loop step");
9745 remove = true;
9746 goto next_ordered_clause;
9749 /* Calculate the first dimension. The first dimension of
9750 the folded dependency vector is the GCD of the first
9751 elements, while ignoring any first elements whose offset
9752 is 0. */
9753 if (i == 0)
9755 /* Ignore dependence vectors whose first dimension is 0. */
9756 if (offset == 0)
9758 remove = true;
9759 goto next_ordered_clause;
9761 else
9763 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9765 error_at (OMP_CLAUSE_LOCATION (c),
9766 "first offset must be in opposite direction "
9767 "of loop iterations");
9768 goto lower_omp_ordered_ret;
9770 if (forward)
9771 offset = -offset;
9772 neg_offset_p = forward;
9773 /* Initialize the first time around. */
9774 if (folded_dep == NULL_TREE)
9776 folded_dep = c;
9777 folded_deps[0] = offset;
9779 else
9780 folded_deps[0] = wi::gcd (folded_deps[0],
9781 offset, UNSIGNED);
9784 /* Calculate minimum for the remaining dimensions. */
9785 else
9787 folded_deps[len + i - 1] = offset;
9788 if (folded_dep == c)
9789 folded_deps[i] = offset;
9790 else if (maybe_lexically_later
9791 && !wi::eq_p (folded_deps[i], offset))
9793 if (forward ^ wi::gts_p (folded_deps[i], offset))
9795 unsigned int j;
9796 folded_dep = c;
9797 for (j = 1; j <= i; j++)
9798 folded_deps[j] = folded_deps[len + j - 1];
9800 else
9801 maybe_lexically_later = false;
9805 gcc_assert (i == len);
9807 remove = true;
9809 next_ordered_clause:
9810 if (remove)
9811 *list_p = OMP_CLAUSE_CHAIN (c);
9812 else
9813 list_p = &OMP_CLAUSE_CHAIN (c);
9816 if (folded_dep)
9818 if (neg_offset_p)
9819 folded_deps[0] = -folded_deps[0];
9821 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9822 if (POINTER_TYPE_P (itype))
9823 itype = sizetype;
9825 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9826 = wide_int_to_tree (itype, folded_deps[0]);
9827 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9828 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9831 lower_omp_ordered_ret:
9833 /* Ordered without clauses is #pragma omp threads, while we want
9834 a nop instead if we remove all clauses. */
9835 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9836 gsi_replace (gsi_p, gimple_build_nop (), true);
9840 /* Expand code for an OpenMP ordered directive. */
9842 static void
9843 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9845 tree block;
9846 gimple *stmt = gsi_stmt (*gsi_p), *g;
9847 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9848 gcall *x;
9849 gbind *bind;
9850 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9851 OMP_CLAUSE_SIMD);
9852 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9853 loop. */
9854 bool maybe_simt
9855 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9856 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9857 OMP_CLAUSE_THREADS);
9859 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9860 OMP_CLAUSE_DEPEND))
9862 /* FIXME: This is needs to be moved to the expansion to verify various
9863 conditions only testable on cfg with dominators computed, and also
9864 all the depend clauses to be merged still might need to be available
9865 for the runtime checks. */
9866 if (0)
9867 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9868 return;
9871 push_gimplify_context ();
9873 block = make_node (BLOCK);
9874 bind = gimple_build_bind (NULL, NULL, block);
9875 gsi_replace (gsi_p, bind, true);
9876 gimple_bind_add_stmt (bind, stmt);
9878 if (simd)
9880 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9881 build_int_cst (NULL_TREE, threads));
9882 cfun->has_simduid_loops = true;
9884 else
9885 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9887 gimple_bind_add_stmt (bind, x);
9889 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9890 if (maybe_simt)
9892 counter = create_tmp_var (integer_type_node);
9893 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9894 gimple_call_set_lhs (g, counter);
9895 gimple_bind_add_stmt (bind, g);
9897 body = create_artificial_label (UNKNOWN_LOCATION);
9898 test = create_artificial_label (UNKNOWN_LOCATION);
9899 gimple_bind_add_stmt (bind, gimple_build_label (body));
9901 tree simt_pred = create_tmp_var (integer_type_node);
9902 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9903 gimple_call_set_lhs (g, simt_pred);
9904 gimple_bind_add_stmt (bind, g);
9906 tree t = create_artificial_label (UNKNOWN_LOCATION);
9907 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9908 gimple_bind_add_stmt (bind, g);
9910 gimple_bind_add_stmt (bind, gimple_build_label (t));
9912 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9913 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9914 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9915 gimple_omp_set_body (stmt, NULL);
9917 if (maybe_simt)
9919 gimple_bind_add_stmt (bind, gimple_build_label (test));
9920 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9921 gimple_bind_add_stmt (bind, g);
9923 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9924 tree nonneg = create_tmp_var (integer_type_node);
9925 gimple_seq tseq = NULL;
9926 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9927 gimple_bind_add_seq (bind, tseq);
9929 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9930 gimple_call_set_lhs (g, nonneg);
9931 gimple_bind_add_stmt (bind, g);
9933 tree end = create_artificial_label (UNKNOWN_LOCATION);
9934 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9935 gimple_bind_add_stmt (bind, g);
9937 gimple_bind_add_stmt (bind, gimple_build_label (end));
9939 if (simd)
9940 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9941 build_int_cst (NULL_TREE, threads));
9942 else
9943 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9945 gimple_bind_add_stmt (bind, x);
9947 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9949 pop_gimplify_context (bind);
9951 gimple_bind_append_vars (bind, ctx->block_vars);
9952 BLOCK_VARS (block) = gimple_bind_vars (bind);
9956 /* Expand code for an OpenMP scan directive and the structured block
9957 before the scan directive. */
9959 static void
9960 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9962 gimple *stmt = gsi_stmt (*gsi_p);
9963 bool has_clauses
9964 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9965 tree lane = NULL_TREE;
9966 gimple_seq before = NULL;
9967 omp_context *octx = ctx->outer;
9968 gcc_assert (octx);
9969 if (octx->scan_exclusive && !has_clauses)
9971 gimple_stmt_iterator gsi2 = *gsi_p;
9972 gsi_next (&gsi2);
9973 gimple *stmt2 = gsi_stmt (gsi2);
9974 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9975 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9976 the one with exclusive clause(s), comes first. */
9977 if (stmt2
9978 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9979 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9981 gsi_remove (gsi_p, false);
9982 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
9983 ctx = maybe_lookup_ctx (stmt2);
9984 gcc_assert (ctx);
9985 lower_omp_scan (gsi_p, ctx);
9986 return;
9990 bool input_phase = has_clauses ^ octx->scan_inclusive;
9991 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9992 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
9993 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9994 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9995 && !gimple_omp_for_combined_p (octx->stmt));
9996 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9997 if (is_for_simd && octx->for_simd_scan_phase)
9998 is_simd = false;
9999 if (is_simd)
10000 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
10001 OMP_CLAUSE__SIMDUID_))
10003 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
10004 lane = create_tmp_var (unsigned_type_node);
10005 tree t = build_int_cst (integer_type_node,
10006 input_phase ? 1
10007 : octx->scan_inclusive ? 2 : 3);
10008 gimple *g
10009 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
10010 gimple_call_set_lhs (g, lane);
10011 gimple_seq_add_stmt (&before, g);
10014 if (is_simd || is_for)
10016 for (tree c = gimple_omp_for_clauses (octx->stmt);
10017 c; c = OMP_CLAUSE_CHAIN (c))
10018 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10019 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10021 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10022 tree var = OMP_CLAUSE_DECL (c);
10023 tree new_var = lookup_decl (var, octx);
10024 tree val = new_var;
10025 tree var2 = NULL_TREE;
10026 tree var3 = NULL_TREE;
10027 tree var4 = NULL_TREE;
10028 tree lane0 = NULL_TREE;
10029 tree new_vard = new_var;
10030 if (omp_privatize_by_reference (var))
10032 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10033 val = new_var;
10035 if (DECL_HAS_VALUE_EXPR_P (new_vard))
10037 val = DECL_VALUE_EXPR (new_vard);
10038 if (new_vard != new_var)
10040 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
10041 val = TREE_OPERAND (val, 0);
10043 if (TREE_CODE (val) == ARRAY_REF
10044 && VAR_P (TREE_OPERAND (val, 0)))
10046 tree v = TREE_OPERAND (val, 0);
10047 if (lookup_attribute ("omp simd array",
10048 DECL_ATTRIBUTES (v)))
10050 val = unshare_expr (val);
10051 lane0 = TREE_OPERAND (val, 1);
10052 TREE_OPERAND (val, 1) = lane;
10053 var2 = lookup_decl (v, octx);
10054 if (octx->scan_exclusive)
10055 var4 = lookup_decl (var2, octx);
10056 if (input_phase
10057 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10058 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
10059 if (!input_phase)
10061 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
10062 var2, lane, NULL_TREE, NULL_TREE);
10063 TREE_THIS_NOTRAP (var2) = 1;
10064 if (octx->scan_exclusive)
10066 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
10067 var4, lane, NULL_TREE,
10068 NULL_TREE);
10069 TREE_THIS_NOTRAP (var4) = 1;
10072 else
10073 var2 = val;
10076 gcc_assert (var2);
10078 else
10080 var2 = build_outer_var_ref (var, octx);
10081 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10083 var3 = maybe_lookup_decl (new_vard, octx);
10084 if (var3 == new_vard || var3 == NULL_TREE)
10085 var3 = NULL_TREE;
10086 else if (is_simd && octx->scan_exclusive && !input_phase)
10088 var4 = maybe_lookup_decl (var3, octx);
10089 if (var4 == var3 || var4 == NULL_TREE)
10091 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
10093 var4 = var3;
10094 var3 = NULL_TREE;
10096 else
10097 var4 = NULL_TREE;
10101 if (is_simd
10102 && octx->scan_exclusive
10103 && !input_phase
10104 && var4 == NULL_TREE)
10105 var4 = create_tmp_var (TREE_TYPE (val));
10107 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10109 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10110 if (input_phase)
10112 if (var3)
10114 /* If we've added a separate identity element
10115 variable, copy it over into val. */
10116 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
10117 var3);
10118 gimplify_and_add (x, &before);
10120 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10122 /* Otherwise, assign to it the identity element. */
10123 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10124 if (is_for)
10125 tseq = copy_gimple_seq_and_replace_locals (tseq);
10126 tree ref = build_outer_var_ref (var, octx);
10127 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10128 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10129 if (x)
10131 if (new_vard != new_var)
10132 val = build_fold_addr_expr_loc (clause_loc, val);
10133 SET_DECL_VALUE_EXPR (new_vard, val);
10135 SET_DECL_VALUE_EXPR (placeholder, ref);
10136 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10137 lower_omp (&tseq, octx);
10138 if (x)
10139 SET_DECL_VALUE_EXPR (new_vard, x);
10140 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10141 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10142 gimple_seq_add_seq (&before, tseq);
10143 if (is_simd)
10144 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10147 else if (is_simd)
10149 tree x;
10150 if (octx->scan_exclusive)
10152 tree v4 = unshare_expr (var4);
10153 tree v2 = unshare_expr (var2);
10154 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
10155 gimplify_and_add (x, &before);
10157 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10158 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10159 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10160 tree vexpr = val;
10161 if (x && new_vard != new_var)
10162 vexpr = build_fold_addr_expr_loc (clause_loc, val);
10163 if (x)
10164 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10165 SET_DECL_VALUE_EXPR (placeholder, var2);
10166 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10167 lower_omp (&tseq, octx);
10168 gimple_seq_add_seq (&before, tseq);
10169 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10170 if (x)
10171 SET_DECL_VALUE_EXPR (new_vard, x);
10172 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10173 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10174 if (octx->scan_inclusive)
10176 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10177 var2);
10178 gimplify_and_add (x, &before);
10180 else if (lane0 == NULL_TREE)
10182 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10183 var4);
10184 gimplify_and_add (x, &before);
10188 else
10190 if (input_phase)
10192 /* input phase. Set val to initializer before
10193 the body. */
10194 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
10195 gimplify_assign (val, x, &before);
10197 else if (is_simd)
10199 /* scan phase. */
10200 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10201 if (code == MINUS_EXPR)
10202 code = PLUS_EXPR;
10204 tree x = build2 (code, TREE_TYPE (var2),
10205 unshare_expr (var2), unshare_expr (val));
10206 if (octx->scan_inclusive)
10208 gimplify_assign (unshare_expr (var2), x, &before);
10209 gimplify_assign (val, var2, &before);
10211 else
10213 gimplify_assign (unshare_expr (var4),
10214 unshare_expr (var2), &before);
10215 gimplify_assign (var2, x, &before);
10216 if (lane0 == NULL_TREE)
10217 gimplify_assign (val, var4, &before);
10221 if (octx->scan_exclusive && !input_phase && lane0)
10223 tree vexpr = unshare_expr (var4);
10224 TREE_OPERAND (vexpr, 1) = lane0;
10225 if (new_vard != new_var)
10226 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
10227 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10231 if (is_simd && !is_for_simd)
10233 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
10234 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
10235 gsi_replace (gsi_p, gimple_build_nop (), true);
10236 return;
10238 lower_omp (gimple_omp_body_ptr (stmt), octx);
10239 if (before)
10241 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
10242 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
10247 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10248 substitution of a couple of function calls. But in the NAMED case,
10249 requires that languages coordinate a symbol name. It is therefore
10250 best put here in common code. */
10252 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
10254 static void
10255 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10257 tree block;
10258 tree name, lock, unlock;
10259 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10260 gbind *bind;
10261 location_t loc = gimple_location (stmt);
10262 gimple_seq tbody;
10264 name = gimple_omp_critical_name (stmt);
10265 if (name)
10267 tree decl;
10269 if (!critical_name_mutexes)
10270 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10272 tree *n = critical_name_mutexes->get (name);
10273 if (n == NULL)
10275 char *new_str;
10277 decl = create_tmp_var_raw (ptr_type_node);
10279 new_str = ACONCAT ((".gomp_critical_user_",
10280 IDENTIFIER_POINTER (name), NULL));
10281 DECL_NAME (decl) = get_identifier (new_str);
10282 TREE_PUBLIC (decl) = 1;
10283 TREE_STATIC (decl) = 1;
10284 DECL_COMMON (decl) = 1;
10285 DECL_ARTIFICIAL (decl) = 1;
10286 DECL_IGNORED_P (decl) = 1;
10288 varpool_node::finalize_decl (decl);
10290 critical_name_mutexes->put (name, decl);
10292 else
10293 decl = *n;
10295 /* If '#pragma omp critical' is inside offloaded region or
10296 inside function marked as offloadable, the symbol must be
10297 marked as offloadable too. */
10298 omp_context *octx;
10299 if (cgraph_node::get (current_function_decl)->offloadable)
10300 varpool_node::get_create (decl)->offloadable = 1;
10301 else
10302 for (octx = ctx->outer; octx; octx = octx->outer)
10303 if (is_gimple_omp_offloaded (octx->stmt))
10305 varpool_node::get_create (decl)->offloadable = 1;
10306 break;
10309 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10310 lock = build_call_expr_loc (loc, lock, 1,
10311 build_fold_addr_expr_loc (loc, decl));
10313 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10314 unlock = build_call_expr_loc (loc, unlock, 1,
10315 build_fold_addr_expr_loc (loc, decl));
10317 else
10319 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10320 lock = build_call_expr_loc (loc, lock, 0);
10322 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10323 unlock = build_call_expr_loc (loc, unlock, 0);
10326 push_gimplify_context ();
10328 block = make_node (BLOCK);
10329 bind = gimple_build_bind (NULL, NULL, block);
10330 gsi_replace (gsi_p, bind, true);
10331 gimple_bind_add_stmt (bind, stmt);
10333 tbody = gimple_bind_body (bind);
10334 gimplify_and_add (lock, &tbody);
10335 gimple_bind_set_body (bind, tbody);
10337 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10338 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10339 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10340 gimple_omp_set_body (stmt, NULL);
10342 tbody = gimple_bind_body (bind);
10343 gimplify_and_add (unlock, &tbody);
10344 gimple_bind_set_body (bind, tbody);
10346 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10348 pop_gimplify_context (bind);
10349 gimple_bind_append_vars (bind, ctx->block_vars);
10350 BLOCK_VARS (block) = gimple_bind_vars (bind);
10353 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10354 for a lastprivate clause. Given a loop control predicate of (V
10355 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10356 is appended to *DLIST, iterator initialization is appended to
10357 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10358 to be emitted in a critical section. */
10360 static void
10361 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10362 gimple_seq *dlist, gimple_seq *clist,
10363 struct omp_context *ctx)
10365 tree clauses, cond, vinit;
10366 enum tree_code cond_code;
10367 gimple_seq stmts;
10369 cond_code = fd->loop.cond_code;
10370 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10372 /* When possible, use a strict equality expression. This can let VRP
10373 type optimizations deduce the value and remove a copy. */
10374 if (tree_fits_shwi_p (fd->loop.step))
10376 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10377 if (step == 1 || step == -1)
10378 cond_code = EQ_EXPR;
10381 tree n2 = fd->loop.n2;
10382 if (fd->collapse > 1
10383 && TREE_CODE (n2) != INTEGER_CST
10384 && gimple_omp_for_combined_into_p (fd->for_stmt))
10386 struct omp_context *taskreg_ctx = NULL;
10387 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10389 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10390 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10391 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10393 if (gimple_omp_for_combined_into_p (gfor))
10395 gcc_assert (ctx->outer->outer
10396 && is_parallel_ctx (ctx->outer->outer));
10397 taskreg_ctx = ctx->outer->outer;
10399 else
10401 struct omp_for_data outer_fd;
10402 omp_extract_for_data (gfor, &outer_fd, NULL);
10403 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10406 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10407 taskreg_ctx = ctx->outer->outer;
10409 else if (is_taskreg_ctx (ctx->outer))
10410 taskreg_ctx = ctx->outer;
10411 if (taskreg_ctx)
10413 int i;
10414 tree taskreg_clauses
10415 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10416 tree innerc = omp_find_clause (taskreg_clauses,
10417 OMP_CLAUSE__LOOPTEMP_);
10418 gcc_assert (innerc);
10419 int count = fd->collapse;
10420 if (fd->non_rect
10421 && fd->last_nonrect == fd->first_nonrect + 1)
10422 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10423 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10424 count += 4;
10425 for (i = 0; i < count; i++)
10427 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10428 OMP_CLAUSE__LOOPTEMP_);
10429 gcc_assert (innerc);
10431 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10432 OMP_CLAUSE__LOOPTEMP_);
10433 if (innerc)
10434 n2 = fold_convert (TREE_TYPE (n2),
10435 lookup_decl (OMP_CLAUSE_DECL (innerc),
10436 taskreg_ctx));
10439 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10441 clauses = gimple_omp_for_clauses (fd->for_stmt);
10442 stmts = NULL;
10443 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
10444 if (!gimple_seq_empty_p (stmts))
10446 gimple_seq_add_seq (&stmts, *dlist);
10447 *dlist = stmts;
10449 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10450 vinit = fd->loop.n1;
10451 if (cond_code == EQ_EXPR
10452 && tree_fits_shwi_p (fd->loop.n2)
10453 && ! integer_zerop (fd->loop.n2))
10454 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10455 else
10456 vinit = unshare_expr (vinit);
10458 /* Initialize the iterator variable, so that threads that don't execute
10459 any iterations don't execute the lastprivate clauses by accident. */
10460 gimplify_assign (fd->loop.v, vinit, body_p);
10464 /* OpenACC privatization.
10466 Or, in other words, *sharing* at the respective OpenACC level of
10467 parallelism.
10469 From a correctness perspective, a non-addressable variable can't be accessed
10470 outside the current thread, so it can go in a (faster than shared memory)
10471 register -- though that register may need to be broadcast in some
10472 circumstances. A variable can only meaningfully be "shared" across workers
10473 or vector lanes if its address is taken, e.g. by a call to an atomic
10474 builtin.
10476 From an optimisation perspective, the answer might be fuzzier: maybe
10477 sometimes, using shared memory directly would be faster than
10478 broadcasting. */
10480 static void
10481 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10482 const location_t loc, const tree c,
10483 const tree decl)
10485 const dump_user_location_t d_u_loc
10486 = dump_user_location_t::from_location_t (loc);
10487 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10488 #if __GNUC__ >= 10
10489 # pragma GCC diagnostic push
10490 # pragma GCC diagnostic ignored "-Wformat"
10491 #endif
10492 dump_printf_loc (l_dump_flags, d_u_loc,
10493 "variable %<%T%> ", decl);
10494 #if __GNUC__ >= 10
10495 # pragma GCC diagnostic pop
10496 #endif
10497 if (c)
10498 dump_printf (l_dump_flags,
10499 "in %qs clause ",
10500 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10501 else
10502 dump_printf (l_dump_flags,
10503 "declared in block ");
10506 static bool
10507 oacc_privatization_candidate_p (const location_t loc, const tree c,
10508 const tree decl)
10510 dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10512 /* There is some differentiation depending on block vs. clause. */
10513 bool block = !c;
10515 bool res = true;
10517 if (res && !VAR_P (decl))
10519 res = false;
10521 if (dump_enabled_p ())
10523 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10524 dump_printf (l_dump_flags,
10525 "potentially has improper OpenACC privatization level: %qs\n",
10526 get_tree_code_name (TREE_CODE (decl)));
10530 if (res && block && TREE_STATIC (decl))
10532 res = false;
10534 if (dump_enabled_p ())
10536 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10537 dump_printf (l_dump_flags,
10538 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10539 "static");
10543 if (res && block && DECL_EXTERNAL (decl))
10545 res = false;
10547 if (dump_enabled_p ())
10549 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10550 dump_printf (l_dump_flags,
10551 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10552 "external");
10556 if (res && !TREE_ADDRESSABLE (decl))
10558 res = false;
10560 if (dump_enabled_p ())
10562 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10563 dump_printf (l_dump_flags,
10564 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10565 "not addressable");
10569 if (res)
10571 if (dump_enabled_p ())
10573 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10574 dump_printf (l_dump_flags,
10575 "is candidate for adjusting OpenACC privatization level\n");
10579 if (dump_file && (dump_flags & TDF_DETAILS))
10581 print_generic_decl (dump_file, decl, dump_flags);
10582 fprintf (dump_file, "\n");
10585 return res;
10588 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10589 CTX. */
10591 static void
10592 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10594 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10595 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10597 tree decl = OMP_CLAUSE_DECL (c);
10599 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c, decl))
10600 continue;
10602 gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10603 ctx->oacc_privatization_candidates.safe_push (decl);
10607 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10608 CTX. */
10610 static void
10611 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10613 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10615 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL, decl))
10616 continue;
10618 gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10619 ctx->oacc_privatization_candidates.safe_push (decl);
10623 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10625 static tree
10626 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10627 struct walk_stmt_info *wi)
10629 gimple *stmt = gsi_stmt (*gsi_p);
10631 *handled_ops_p = true;
10632 switch (gimple_code (stmt))
10634 WALK_SUBSTMTS;
10636 case GIMPLE_OMP_FOR:
10637 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10638 && gimple_omp_for_combined_into_p (stmt))
10639 *handled_ops_p = false;
10640 break;
10642 case GIMPLE_OMP_SCAN:
10643 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10644 return integer_zero_node;
10645 default:
10646 break;
10648 return NULL;
10651 /* Helper function for lower_omp_for, add transformations for a worksharing
10652 loop with scan directives inside of it.
10653 For worksharing loop not combined with simd, transform:
10654 #pragma omp for reduction(inscan,+:r) private(i)
10655 for (i = 0; i < n; i = i + 1)
10658 update (r);
10660 #pragma omp scan inclusive(r)
10662 use (r);
10666 into two worksharing loops + code to merge results:
10668 num_threads = omp_get_num_threads ();
10669 thread_num = omp_get_thread_num ();
10670 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10671 <D.2099>:
10672 var2 = r;
10673 goto <D.2101>;
10674 <D.2100>:
10675 // For UDRs this is UDR init, or if ctors are needed, copy from
10676 // var3 that has been constructed to contain the neutral element.
10677 var2 = 0;
10678 <D.2101>:
10679 ivar = 0;
10680 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10681 // a shared array with num_threads elements and rprivb to a local array
10682 // number of elements equal to the number of (contiguous) iterations the
10683 // current thread will perform. controlb and controlp variables are
10684 // temporaries to handle deallocation of rprivb at the end of second
10685 // GOMP_FOR.
10686 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10687 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10688 for (i = 0; i < n; i = i + 1)
10691 // For UDRs this is UDR init or copy from var3.
10692 r = 0;
10693 // This is the input phase from user code.
10694 update (r);
10697 // For UDRs this is UDR merge.
10698 var2 = var2 + r;
10699 // Rather than handing it over to the user, save to local thread's
10700 // array.
10701 rprivb[ivar] = var2;
10702 // For exclusive scan, the above two statements are swapped.
10703 ivar = ivar + 1;
10706 // And remember the final value from this thread's into the shared
10707 // rpriva array.
10708 rpriva[(sizetype) thread_num] = var2;
10709 // If more than one thread, compute using Work-Efficient prefix sum
10710 // the inclusive parallel scan of the rpriva array.
10711 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10712 <D.2102>:
10713 GOMP_barrier ();
10714 down = 0;
10715 k = 1;
10716 num_threadsu = (unsigned int) num_threads;
10717 thread_numup1 = (unsigned int) thread_num + 1;
10718 <D.2108>:
10719 twok = k << 1;
10720 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10721 <D.2110>:
10722 down = 4294967295;
10723 k = k >> 1;
10724 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10725 <D.2112>:
10726 k = k >> 1;
10727 <D.2111>:
10728 twok = k << 1;
10729 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10730 mul = REALPART_EXPR <cplx>;
10731 ovf = IMAGPART_EXPR <cplx>;
10732 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10733 <D.2116>:
10734 andv = k & down;
10735 andvm1 = andv + 4294967295;
10736 l = mul + andvm1;
10737 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10738 <D.2120>:
10739 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10740 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10741 rpriva[l] = rpriva[l - k] + rpriva[l];
10742 <D.2117>:
10743 if (down == 0) goto <D.2121>; else goto <D.2122>;
10744 <D.2121>:
10745 k = k << 1;
10746 goto <D.2123>;
10747 <D.2122>:
10748 k = k >> 1;
10749 <D.2123>:
10750 GOMP_barrier ();
10751 if (k != 0) goto <D.2108>; else goto <D.2103>;
10752 <D.2103>:
10753 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10754 <D.2124>:
10755 // For UDRs this is UDR init or copy from var3.
10756 var2 = 0;
10757 goto <D.2126>;
10758 <D.2125>:
10759 var2 = rpriva[thread_num - 1];
10760 <D.2126>:
10761 ivar = 0;
10762 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10763 reduction(inscan,+:r) private(i)
10764 for (i = 0; i < n; i = i + 1)
10767 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10768 r = var2 + rprivb[ivar];
10771 // This is the scan phase from user code.
10772 use (r);
10773 // Plus a bump of the iterator.
10774 ivar = ivar + 1;
10776 } */
10778 static void
10779 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10780 struct omp_for_data *fd, omp_context *ctx)
10782 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10783 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10785 gimple_seq body = gimple_omp_body (stmt);
10786 gimple_stmt_iterator input1_gsi = gsi_none ();
10787 struct walk_stmt_info wi;
10788 memset (&wi, 0, sizeof (wi));
10789 wi.val_only = true;
10790 wi.info = (void *) &input1_gsi;
10791 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10792 gcc_assert (!gsi_end_p (input1_gsi));
10794 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10795 gimple_stmt_iterator gsi = input1_gsi;
10796 gsi_next (&gsi);
10797 gimple_stmt_iterator scan1_gsi = gsi;
10798 gimple *scan_stmt1 = gsi_stmt (gsi);
10799 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10801 gimple_seq input_body = gimple_omp_body (input_stmt1);
10802 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10803 gimple_omp_set_body (input_stmt1, NULL);
10804 gimple_omp_set_body (scan_stmt1, NULL);
10805 gimple_omp_set_body (stmt, NULL);
10807 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10808 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10809 gimple_omp_set_body (stmt, body);
10810 gimple_omp_set_body (input_stmt1, input_body);
10812 gimple_stmt_iterator input2_gsi = gsi_none ();
10813 memset (&wi, 0, sizeof (wi));
10814 wi.val_only = true;
10815 wi.info = (void *) &input2_gsi;
10816 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10817 gcc_assert (!gsi_end_p (input2_gsi));
10819 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10820 gsi = input2_gsi;
10821 gsi_next (&gsi);
10822 gimple_stmt_iterator scan2_gsi = gsi;
10823 gimple *scan_stmt2 = gsi_stmt (gsi);
10824 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10825 gimple_omp_set_body (scan_stmt2, scan_body);
10827 gimple_stmt_iterator input3_gsi = gsi_none ();
10828 gimple_stmt_iterator scan3_gsi = gsi_none ();
10829 gimple_stmt_iterator input4_gsi = gsi_none ();
10830 gimple_stmt_iterator scan4_gsi = gsi_none ();
10831 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10832 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10833 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10834 if (is_for_simd)
10836 memset (&wi, 0, sizeof (wi));
10837 wi.val_only = true;
10838 wi.info = (void *) &input3_gsi;
10839 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10840 gcc_assert (!gsi_end_p (input3_gsi));
10842 input_stmt3 = gsi_stmt (input3_gsi);
10843 gsi = input3_gsi;
10844 gsi_next (&gsi);
10845 scan3_gsi = gsi;
10846 scan_stmt3 = gsi_stmt (gsi);
10847 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10849 memset (&wi, 0, sizeof (wi));
10850 wi.val_only = true;
10851 wi.info = (void *) &input4_gsi;
10852 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10853 gcc_assert (!gsi_end_p (input4_gsi));
10855 input_stmt4 = gsi_stmt (input4_gsi);
10856 gsi = input4_gsi;
10857 gsi_next (&gsi);
10858 scan4_gsi = gsi;
10859 scan_stmt4 = gsi_stmt (gsi);
10860 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10862 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10863 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
10866 tree num_threads = create_tmp_var (integer_type_node);
10867 tree thread_num = create_tmp_var (integer_type_node);
10868 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10869 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10870 gimple *g = gimple_build_call (nthreads_decl, 0);
10871 gimple_call_set_lhs (g, num_threads);
10872 gimple_seq_add_stmt (body_p, g);
10873 g = gimple_build_call (threadnum_decl, 0);
10874 gimple_call_set_lhs (g, thread_num);
10875 gimple_seq_add_stmt (body_p, g);
10877 tree ivar = create_tmp_var (sizetype);
10878 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
10879 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
10880 tree k = create_tmp_var (unsigned_type_node);
10881 tree l = create_tmp_var (unsigned_type_node);
10883 gimple_seq clist = NULL, mdlist = NULL;
10884 gimple_seq thr01_list = NULL, thrn1_list = NULL;
10885 gimple_seq thr02_list = NULL, thrn2_list = NULL;
10886 gimple_seq scan1_list = NULL, input2_list = NULL;
10887 gimple_seq last_list = NULL, reduc_list = NULL;
10888 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10889 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10890 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10892 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10893 tree var = OMP_CLAUSE_DECL (c);
10894 tree new_var = lookup_decl (var, ctx);
10895 tree var3 = NULL_TREE;
10896 tree new_vard = new_var;
10897 if (omp_privatize_by_reference (var))
10898 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10899 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10901 var3 = maybe_lookup_decl (new_vard, ctx);
10902 if (var3 == new_vard)
10903 var3 = NULL_TREE;
10906 tree ptype = build_pointer_type (TREE_TYPE (new_var));
10907 tree rpriva = create_tmp_var (ptype);
10908 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10909 OMP_CLAUSE_DECL (nc) = rpriva;
10910 *cp1 = nc;
10911 cp1 = &OMP_CLAUSE_CHAIN (nc);
10913 tree rprivb = create_tmp_var (ptype);
10914 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10915 OMP_CLAUSE_DECL (nc) = rprivb;
10916 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
10917 *cp1 = nc;
10918 cp1 = &OMP_CLAUSE_CHAIN (nc);
10920 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
10921 if (new_vard != new_var)
10922 TREE_ADDRESSABLE (var2) = 1;
10923 gimple_add_tmp_var (var2);
10925 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
10926 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10927 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10928 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10929 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
10931 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
10932 thread_num, integer_minus_one_node);
10933 x = fold_convert_loc (clause_loc, sizetype, x);
10934 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10935 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10936 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10937 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
10939 x = fold_convert_loc (clause_loc, sizetype, l);
10940 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10941 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10942 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10943 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
10945 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
10946 x = fold_convert_loc (clause_loc, sizetype, x);
10947 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10948 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10949 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10950 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
10952 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
10953 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10954 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
10955 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
10957 tree var4 = is_for_simd ? new_var : var2;
10958 tree var5 = NULL_TREE, var6 = NULL_TREE;
10959 if (is_for_simd)
10961 var5 = lookup_decl (var, input_simd_ctx);
10962 var6 = lookup_decl (var, scan_simd_ctx);
10963 if (new_vard != new_var)
10965 var5 = build_simple_mem_ref_loc (clause_loc, var5);
10966 var6 = build_simple_mem_ref_loc (clause_loc, var6);
10969 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10971 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10972 tree val = var2;
10974 x = lang_hooks.decls.omp_clause_default_ctor
10975 (c, var2, build_outer_var_ref (var, ctx));
10976 if (x)
10977 gimplify_and_add (x, &clist);
10979 x = build_outer_var_ref (var, ctx);
10980 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
10982 gimplify_and_add (x, &thr01_list);
10984 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
10985 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10986 if (var3)
10988 x = unshare_expr (var4);
10989 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10990 gimplify_and_add (x, &thrn1_list);
10991 x = unshare_expr (var4);
10992 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10993 gimplify_and_add (x, &thr02_list);
10995 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10997 /* Otherwise, assign to it the identity element. */
10998 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10999 tseq = copy_gimple_seq_and_replace_locals (tseq);
11000 if (!is_for_simd)
11002 if (new_vard != new_var)
11003 val = build_fold_addr_expr_loc (clause_loc, val);
11004 SET_DECL_VALUE_EXPR (new_vard, val);
11005 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11007 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
11008 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11009 lower_omp (&tseq, ctx);
11010 gimple_seq_add_seq (&thrn1_list, tseq);
11011 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11012 lower_omp (&tseq, ctx);
11013 gimple_seq_add_seq (&thr02_list, tseq);
11014 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11015 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11016 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11017 if (y)
11018 SET_DECL_VALUE_EXPR (new_vard, y);
11019 else
11021 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11022 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11026 x = unshare_expr (var4);
11027 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
11028 gimplify_and_add (x, &thrn2_list);
11030 if (is_for_simd)
11032 x = unshare_expr (rprivb_ref);
11033 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
11034 gimplify_and_add (x, &scan1_list);
11036 else
11038 if (ctx->scan_exclusive)
11040 x = unshare_expr (rprivb_ref);
11041 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11042 gimplify_and_add (x, &scan1_list);
11045 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11046 tseq = copy_gimple_seq_and_replace_locals (tseq);
11047 SET_DECL_VALUE_EXPR (placeholder, var2);
11048 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11049 lower_omp (&tseq, ctx);
11050 gimple_seq_add_seq (&scan1_list, tseq);
11052 if (ctx->scan_inclusive)
11054 x = unshare_expr (rprivb_ref);
11055 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11056 gimplify_and_add (x, &scan1_list);
11060 x = unshare_expr (rpriva_ref);
11061 x = lang_hooks.decls.omp_clause_assign_op (c, x,
11062 unshare_expr (var4));
11063 gimplify_and_add (x, &mdlist);
11065 x = unshare_expr (is_for_simd ? var6 : new_var);
11066 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
11067 gimplify_and_add (x, &input2_list);
11069 val = rprivb_ref;
11070 if (new_vard != new_var)
11071 val = build_fold_addr_expr_loc (clause_loc, val);
11073 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11074 tseq = copy_gimple_seq_and_replace_locals (tseq);
11075 SET_DECL_VALUE_EXPR (new_vard, val);
11076 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11077 if (is_for_simd)
11079 SET_DECL_VALUE_EXPR (placeholder, var6);
11080 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11082 else
11083 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11084 lower_omp (&tseq, ctx);
11085 if (y)
11086 SET_DECL_VALUE_EXPR (new_vard, y);
11087 else
11089 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11090 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11092 if (!is_for_simd)
11094 SET_DECL_VALUE_EXPR (placeholder, new_var);
11095 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11096 lower_omp (&tseq, ctx);
11098 gimple_seq_add_seq (&input2_list, tseq);
11100 x = build_outer_var_ref (var, ctx);
11101 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
11102 gimplify_and_add (x, &last_list);
11104 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
11105 gimplify_and_add (x, &reduc_list);
11106 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11107 tseq = copy_gimple_seq_and_replace_locals (tseq);
11108 val = rprival_ref;
11109 if (new_vard != new_var)
11110 val = build_fold_addr_expr_loc (clause_loc, val);
11111 SET_DECL_VALUE_EXPR (new_vard, val);
11112 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11113 SET_DECL_VALUE_EXPR (placeholder, var2);
11114 lower_omp (&tseq, ctx);
11115 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11116 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11117 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11118 if (y)
11119 SET_DECL_VALUE_EXPR (new_vard, y);
11120 else
11122 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11123 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11125 gimple_seq_add_seq (&reduc_list, tseq);
11126 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
11127 gimplify_and_add (x, &reduc_list);
11129 x = lang_hooks.decls.omp_clause_dtor (c, var2);
11130 if (x)
11131 gimplify_and_add (x, dlist);
11133 else
11135 x = build_outer_var_ref (var, ctx);
11136 gimplify_assign (unshare_expr (var4), x, &thr01_list);
11138 x = omp_reduction_init (c, TREE_TYPE (new_var));
11139 gimplify_assign (unshare_expr (var4), unshare_expr (x),
11140 &thrn1_list);
11141 gimplify_assign (unshare_expr (var4), x, &thr02_list);
11143 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
11145 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
11146 if (code == MINUS_EXPR)
11147 code = PLUS_EXPR;
11149 if (is_for_simd)
11150 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
11151 else
11153 if (ctx->scan_exclusive)
11154 gimplify_assign (unshare_expr (rprivb_ref), var2,
11155 &scan1_list);
11156 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
11157 gimplify_assign (var2, x, &scan1_list);
11158 if (ctx->scan_inclusive)
11159 gimplify_assign (unshare_expr (rprivb_ref), var2,
11160 &scan1_list);
11163 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
11164 &mdlist);
11166 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
11167 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
11169 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
11170 &last_list);
11172 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
11173 unshare_expr (rprival_ref));
11174 gimplify_assign (rprival_ref, x, &reduc_list);
11178 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11179 gimple_seq_add_stmt (&scan1_list, g);
11180 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11181 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11182 ? scan_stmt4 : scan_stmt2), g);
11184 tree controlb = create_tmp_var (boolean_type_node);
11185 tree controlp = create_tmp_var (ptr_type_node);
11186 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11187 OMP_CLAUSE_DECL (nc) = controlb;
11188 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11189 *cp1 = nc;
11190 cp1 = &OMP_CLAUSE_CHAIN (nc);
11191 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11192 OMP_CLAUSE_DECL (nc) = controlp;
11193 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11194 *cp1 = nc;
11195 cp1 = &OMP_CLAUSE_CHAIN (nc);
11196 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11197 OMP_CLAUSE_DECL (nc) = controlb;
11198 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11199 *cp2 = nc;
11200 cp2 = &OMP_CLAUSE_CHAIN (nc);
11201 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11202 OMP_CLAUSE_DECL (nc) = controlp;
11203 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11204 *cp2 = nc;
11205 cp2 = &OMP_CLAUSE_CHAIN (nc);
11207 *cp1 = gimple_omp_for_clauses (stmt);
11208 gimple_omp_for_set_clauses (stmt, new_clauses1);
11209 *cp2 = gimple_omp_for_clauses (new_stmt);
11210 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
11212 if (is_for_simd)
11214 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
11215 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
11217 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
11218 GSI_SAME_STMT);
11219 gsi_remove (&input3_gsi, true);
11220 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11221 GSI_SAME_STMT);
11222 gsi_remove (&scan3_gsi, true);
11223 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11224 GSI_SAME_STMT);
11225 gsi_remove (&input4_gsi, true);
11226 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11227 GSI_SAME_STMT);
11228 gsi_remove (&scan4_gsi, true);
11230 else
11232 gimple_omp_set_body (scan_stmt1, scan1_list);
11233 gimple_omp_set_body (input_stmt2, input2_list);
11236 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11237 GSI_SAME_STMT);
11238 gsi_remove (&input1_gsi, true);
11239 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11240 GSI_SAME_STMT);
11241 gsi_remove (&scan1_gsi, true);
11242 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11243 GSI_SAME_STMT);
11244 gsi_remove (&input2_gsi, true);
11245 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11246 GSI_SAME_STMT);
11247 gsi_remove (&scan2_gsi, true);
11249 gimple_seq_add_seq (body_p, clist);
11251 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11252 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11253 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11254 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11255 gimple_seq_add_stmt (body_p, g);
11256 g = gimple_build_label (lab1);
11257 gimple_seq_add_stmt (body_p, g);
11258 gimple_seq_add_seq (body_p, thr01_list);
11259 g = gimple_build_goto (lab3);
11260 gimple_seq_add_stmt (body_p, g);
11261 g = gimple_build_label (lab2);
11262 gimple_seq_add_stmt (body_p, g);
11263 gimple_seq_add_seq (body_p, thrn1_list);
11264 g = gimple_build_label (lab3);
11265 gimple_seq_add_stmt (body_p, g);
11267 g = gimple_build_assign (ivar, size_zero_node);
11268 gimple_seq_add_stmt (body_p, g);
11270 gimple_seq_add_stmt (body_p, stmt);
11271 gimple_seq_add_seq (body_p, body);
11272 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11273 fd->loop.v));
11275 g = gimple_build_omp_return (true);
11276 gimple_seq_add_stmt (body_p, g);
11277 gimple_seq_add_seq (body_p, mdlist);
11279 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11280 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11281 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11282 gimple_seq_add_stmt (body_p, g);
11283 g = gimple_build_label (lab1);
11284 gimple_seq_add_stmt (body_p, g);
11286 g = omp_build_barrier (NULL);
11287 gimple_seq_add_stmt (body_p, g);
11289 tree down = create_tmp_var (unsigned_type_node);
11290 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11291 gimple_seq_add_stmt (body_p, g);
11293 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11294 gimple_seq_add_stmt (body_p, g);
11296 tree num_threadsu = create_tmp_var (unsigned_type_node);
11297 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11298 gimple_seq_add_stmt (body_p, g);
11300 tree thread_numu = create_tmp_var (unsigned_type_node);
11301 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11302 gimple_seq_add_stmt (body_p, g);
11304 tree thread_nump1 = create_tmp_var (unsigned_type_node);
11305 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11306 build_int_cst (unsigned_type_node, 1));
11307 gimple_seq_add_stmt (body_p, g);
11309 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11310 g = gimple_build_label (lab3);
11311 gimple_seq_add_stmt (body_p, g);
11313 tree twok = create_tmp_var (unsigned_type_node);
11314 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11315 gimple_seq_add_stmt (body_p, g);
11317 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11318 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11319 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11320 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11321 gimple_seq_add_stmt (body_p, g);
11322 g = gimple_build_label (lab4);
11323 gimple_seq_add_stmt (body_p, g);
11324 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11325 gimple_seq_add_stmt (body_p, g);
11326 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11327 gimple_seq_add_stmt (body_p, g);
11329 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11330 gimple_seq_add_stmt (body_p, g);
11331 g = gimple_build_label (lab6);
11332 gimple_seq_add_stmt (body_p, g);
11334 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11335 gimple_seq_add_stmt (body_p, g);
11337 g = gimple_build_label (lab5);
11338 gimple_seq_add_stmt (body_p, g);
11340 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11341 gimple_seq_add_stmt (body_p, g);
11343 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11344 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11345 gimple_call_set_lhs (g, cplx);
11346 gimple_seq_add_stmt (body_p, g);
11347 tree mul = create_tmp_var (unsigned_type_node);
11348 g = gimple_build_assign (mul, REALPART_EXPR,
11349 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11350 gimple_seq_add_stmt (body_p, g);
11351 tree ovf = create_tmp_var (unsigned_type_node);
11352 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11353 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11354 gimple_seq_add_stmt (body_p, g);
11356 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11357 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11358 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11359 lab7, lab8);
11360 gimple_seq_add_stmt (body_p, g);
11361 g = gimple_build_label (lab7);
11362 gimple_seq_add_stmt (body_p, g);
11364 tree andv = create_tmp_var (unsigned_type_node);
11365 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11366 gimple_seq_add_stmt (body_p, g);
11367 tree andvm1 = create_tmp_var (unsigned_type_node);
11368 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11369 build_minus_one_cst (unsigned_type_node));
11370 gimple_seq_add_stmt (body_p, g);
11372 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11373 gimple_seq_add_stmt (body_p, g);
11375 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11376 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11377 gimple_seq_add_stmt (body_p, g);
11378 g = gimple_build_label (lab9);
11379 gimple_seq_add_stmt (body_p, g);
11380 gimple_seq_add_seq (body_p, reduc_list);
11381 g = gimple_build_label (lab8);
11382 gimple_seq_add_stmt (body_p, g);
11384 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11385 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11386 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11387 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11388 lab10, lab11);
11389 gimple_seq_add_stmt (body_p, g);
11390 g = gimple_build_label (lab10);
11391 gimple_seq_add_stmt (body_p, g);
11392 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11393 gimple_seq_add_stmt (body_p, g);
11394 g = gimple_build_goto (lab12);
11395 gimple_seq_add_stmt (body_p, g);
11396 g = gimple_build_label (lab11);
11397 gimple_seq_add_stmt (body_p, g);
11398 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11399 gimple_seq_add_stmt (body_p, g);
11400 g = gimple_build_label (lab12);
11401 gimple_seq_add_stmt (body_p, g);
11403 g = omp_build_barrier (NULL);
11404 gimple_seq_add_stmt (body_p, g);
11406 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11407 lab3, lab2);
11408 gimple_seq_add_stmt (body_p, g);
11410 g = gimple_build_label (lab2);
11411 gimple_seq_add_stmt (body_p, g);
11413 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11414 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11415 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11416 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11417 gimple_seq_add_stmt (body_p, g);
11418 g = gimple_build_label (lab1);
11419 gimple_seq_add_stmt (body_p, g);
11420 gimple_seq_add_seq (body_p, thr02_list);
11421 g = gimple_build_goto (lab3);
11422 gimple_seq_add_stmt (body_p, g);
11423 g = gimple_build_label (lab2);
11424 gimple_seq_add_stmt (body_p, g);
11425 gimple_seq_add_seq (body_p, thrn2_list);
11426 g = gimple_build_label (lab3);
11427 gimple_seq_add_stmt (body_p, g);
11429 g = gimple_build_assign (ivar, size_zero_node);
11430 gimple_seq_add_stmt (body_p, g);
11431 gimple_seq_add_stmt (body_p, new_stmt);
11432 gimple_seq_add_seq (body_p, new_body);
11434 gimple_seq new_dlist = NULL;
11435 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11436 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11437 tree num_threadsm1 = create_tmp_var (integer_type_node);
11438 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11439 integer_minus_one_node);
11440 gimple_seq_add_stmt (&new_dlist, g);
11441 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11442 gimple_seq_add_stmt (&new_dlist, g);
11443 g = gimple_build_label (lab1);
11444 gimple_seq_add_stmt (&new_dlist, g);
11445 gimple_seq_add_seq (&new_dlist, last_list);
11446 g = gimple_build_label (lab2);
11447 gimple_seq_add_stmt (&new_dlist, g);
11448 gimple_seq_add_seq (&new_dlist, *dlist);
11449 *dlist = new_dlist;
11452 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11453 the addresses of variables to be made private at the surrounding
11454 parallelism level. Such functions appear in the gimple code stream in two
11455 forms, e.g. for a partitioned loop:
11457 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11458 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11459 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11460 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11462 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11463 not as part of a HEAD_MARK sequence:
11465 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11467 For such stand-alone appearances, the 3rd argument is always 0, denoting
11468 gang partitioning. */
11470 static gcall *
11471 lower_oacc_private_marker (omp_context *ctx)
11473 if (ctx->oacc_privatization_candidates.length () == 0)
11474 return NULL;
11476 auto_vec<tree, 5> args;
11478 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11479 args.quick_push (integer_zero_node);
11480 args.quick_push (integer_minus_one_node);
11482 int i;
11483 tree decl;
11484 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11486 for (omp_context *thisctx = ctx; thisctx; thisctx = thisctx->outer)
11488 tree inner_decl = maybe_lookup_decl (decl, thisctx);
11489 if (inner_decl)
11491 decl = inner_decl;
11492 break;
11495 gcc_checking_assert (decl);
11497 tree addr = build_fold_addr_expr (decl);
11498 args.safe_push (addr);
11501 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
11504 /* Lower code for an OMP loop directive. */
11506 static void
11507 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11509 tree *rhs_p, block;
11510 struct omp_for_data fd, *fdp = NULL;
11511 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
11512 gbind *new_stmt;
11513 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
11514 gimple_seq cnt_list = NULL, clist = NULL;
11515 gimple_seq oacc_head = NULL, oacc_tail = NULL;
11516 size_t i;
11518 push_gimplify_context ();
11520 if (is_gimple_omp_oacc (ctx->stmt))
11521 oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));
11523 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
11525 block = make_node (BLOCK);
11526 new_stmt = gimple_build_bind (NULL, NULL, block);
11527 /* Replace at gsi right away, so that 'stmt' is no member
11528 of a sequence anymore as we're going to add to a different
11529 one below. */
11530 gsi_replace (gsi_p, new_stmt, true);
11532 /* Move declaration of temporaries in the loop body before we make
11533 it go away. */
11534 omp_for_body = gimple_omp_body (stmt);
11535 if (!gimple_seq_empty_p (omp_for_body)
11536 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
11538 gbind *inner_bind
11539 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
11540 tree vars = gimple_bind_vars (inner_bind);
11541 if (is_gimple_omp_oacc (ctx->stmt))
11542 oacc_privatization_scan_decl_chain (ctx, vars);
11543 gimple_bind_append_vars (new_stmt, vars);
11544 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11545 keep them on the inner_bind and it's block. */
11546 gimple_bind_set_vars (inner_bind, NULL_TREE);
11547 if (gimple_bind_block (inner_bind))
11548 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
11551 if (gimple_omp_for_combined_into_p (stmt))
11553 omp_extract_for_data (stmt, &fd, NULL);
11554 fdp = &fd;
11556 /* We need two temporaries with fd.loop.v type (istart/iend)
11557 and then (fd.collapse - 1) temporaries with the same
11558 type for count2 ... countN-1 vars if not constant. */
11559 size_t count = 2;
11560 tree type = fd.iter_type;
11561 if (fd.collapse > 1
11562 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11563 count += fd.collapse - 1;
11564 size_t count2 = 0;
11565 tree type2 = NULL_TREE;
11566 bool taskreg_for
11567 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
11568 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
11569 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
11570 tree simtc = NULL;
11571 tree clauses = *pc;
11572 if (fd.collapse > 1
11573 && fd.non_rect
11574 && fd.last_nonrect == fd.first_nonrect + 1
11575 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11576 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
11577 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
11579 v = gimple_omp_for_index (stmt, fd.first_nonrect);
11580 type2 = TREE_TYPE (v);
11581 count++;
11582 count2 = 3;
11584 if (taskreg_for)
11585 outerc
11586 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
11587 OMP_CLAUSE__LOOPTEMP_);
11588 if (ctx->simt_stmt)
11589 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
11590 OMP_CLAUSE__LOOPTEMP_);
11591 for (i = 0; i < count + count2; i++)
11593 tree temp;
11594 if (taskreg_for)
11596 gcc_assert (outerc);
11597 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
11598 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
11599 OMP_CLAUSE__LOOPTEMP_);
11601 else
11603 /* If there are 2 adjacent SIMD stmts, one with _simt_
11604 clause, another without, make sure they have the same
11605 decls in _looptemp_ clauses, because the outer stmt
11606 they are combined into will look up just one inner_stmt. */
11607 if (ctx->simt_stmt)
11608 temp = OMP_CLAUSE_DECL (simtc);
11609 else
11610 temp = create_tmp_var (i >= count ? type2 : type);
11611 insert_decl_map (&ctx->outer->cb, temp, temp);
11613 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
11614 OMP_CLAUSE_DECL (*pc) = temp;
11615 pc = &OMP_CLAUSE_CHAIN (*pc);
11616 if (ctx->simt_stmt)
11617 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
11618 OMP_CLAUSE__LOOPTEMP_);
11620 *pc = clauses;
11623 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11624 dlist = NULL;
11625 body = NULL;
11626 tree rclauses
11627 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
11628 OMP_CLAUSE_REDUCTION);
11629 tree rtmp = NULL_TREE;
11630 if (rclauses)
11632 tree type = build_pointer_type (pointer_sized_int_node);
11633 tree temp = create_tmp_var (type);
11634 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
11635 OMP_CLAUSE_DECL (c) = temp;
11636 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
11637 gimple_omp_for_set_clauses (stmt, c);
11638 lower_omp_task_reductions (ctx, OMP_FOR,
11639 gimple_omp_for_clauses (stmt),
11640 &tred_ilist, &tred_dlist);
11641 rclauses = c;
11642 rtmp = make_ssa_name (type);
11643 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
11646 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
11647 ctx);
11649 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
11650 fdp);
11651 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
11652 gimple_omp_for_pre_body (stmt));
11654 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11656 gcall *private_marker = NULL;
11657 if (is_gimple_omp_oacc (ctx->stmt)
11658 && !gimple_seq_empty_p (omp_for_body))
11659 private_marker = lower_oacc_private_marker (ctx);
11661 /* Lower the header expressions. At this point, we can assume that
11662 the header is of the form:
11664 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11666 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11667 using the .omp_data_s mapping, if needed. */
11668 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
11670 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
11671 if (TREE_CODE (*rhs_p) == TREE_VEC)
11673 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11674 TREE_VEC_ELT (*rhs_p, 1)
11675 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11676 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11677 TREE_VEC_ELT (*rhs_p, 2)
11678 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11680 else if (!is_gimple_min_invariant (*rhs_p))
11681 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11682 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11683 recompute_tree_invariant_for_addr_expr (*rhs_p);
11685 rhs_p = gimple_omp_for_final_ptr (stmt, i);
11686 if (TREE_CODE (*rhs_p) == TREE_VEC)
11688 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11689 TREE_VEC_ELT (*rhs_p, 1)
11690 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11691 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11692 TREE_VEC_ELT (*rhs_p, 2)
11693 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11695 else if (!is_gimple_min_invariant (*rhs_p))
11696 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11697 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11698 recompute_tree_invariant_for_addr_expr (*rhs_p);
11700 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
11701 if (!is_gimple_min_invariant (*rhs_p))
11702 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11704 if (rclauses)
11705 gimple_seq_add_seq (&tred_ilist, cnt_list);
11706 else
11707 gimple_seq_add_seq (&body, cnt_list);
11709 /* Once lowered, extract the bounds and clauses. */
11710 omp_extract_for_data (stmt, &fd, NULL);
11712 if (is_gimple_omp_oacc (ctx->stmt)
11713 && !ctx_in_oacc_kernels_region (ctx))
11714 lower_oacc_head_tail (gimple_location (stmt),
11715 gimple_omp_for_clauses (stmt), private_marker,
11716 &oacc_head, &oacc_tail, ctx);
11718 /* Add OpenACC partitioning and reduction markers just before the loop. */
11719 if (oacc_head)
11720 gimple_seq_add_seq (&body, oacc_head);
11722 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
11724 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11725 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11726 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11727 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11729 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
11730 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
11731 OMP_CLAUSE_LINEAR_STEP (c)
11732 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
11733 ctx);
11736 if ((ctx->scan_inclusive || ctx->scan_exclusive)
11737 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11738 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
11739 else
11741 gimple_seq_add_stmt (&body, stmt);
11742 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
11745 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
11746 fd.loop.v));
11748 /* After the loop, add exit clauses. */
11749 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
11751 if (clist)
11753 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
11754 gcall *g = gimple_build_call (fndecl, 0);
11755 gimple_seq_add_stmt (&body, g);
11756 gimple_seq_add_seq (&body, clist);
11757 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
11758 g = gimple_build_call (fndecl, 0);
11759 gimple_seq_add_stmt (&body, g);
11762 if (ctx->cancellable)
11763 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
11765 gimple_seq_add_seq (&body, dlist);
11767 if (rclauses)
11769 gimple_seq_add_seq (&tred_ilist, body);
11770 body = tred_ilist;
11773 body = maybe_catch_exception (body);
11775 /* Region exit marker goes at the end of the loop body. */
11776 gimple *g = gimple_build_omp_return (fd.have_nowait);
11777 gimple_seq_add_stmt (&body, g);
11779 gimple_seq_add_seq (&body, tred_dlist);
11781 maybe_add_implicit_barrier_cancel (ctx, g, &body);
11783 if (rclauses)
11784 OMP_CLAUSE_DECL (rclauses) = rtmp;
11786 /* Add OpenACC joining and reduction markers just after the loop. */
11787 if (oacc_tail)
11788 gimple_seq_add_seq (&body, oacc_tail);
11790 pop_gimplify_context (new_stmt);
11792 gimple_bind_append_vars (new_stmt, ctx->block_vars);
11793 maybe_remove_omp_member_access_dummy_vars (new_stmt);
11794 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
11795 if (BLOCK_VARS (block))
11796 TREE_USED (block) = 1;
11798 gimple_bind_set_body (new_stmt, body);
11799 gimple_omp_set_body (stmt, NULL);
11800 gimple_omp_for_set_pre_body (stmt, NULL);
11803 /* Callback for walk_stmts. Check if the current statement only contains
11804 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11806 static tree
11807 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11808 bool *handled_ops_p,
11809 struct walk_stmt_info *wi)
11811 int *info = (int *) wi->info;
11812 gimple *stmt = gsi_stmt (*gsi_p);
11814 *handled_ops_p = true;
11815 switch (gimple_code (stmt))
11817 WALK_SUBSTMTS;
11819 case GIMPLE_DEBUG:
11820 break;
11821 case GIMPLE_OMP_FOR:
11822 case GIMPLE_OMP_SECTIONS:
11823 *info = *info == 0 ? 1 : -1;
11824 break;
11825 default:
11826 *info = -1;
11827 break;
11829 return NULL;
11832 struct omp_taskcopy_context
11834 /* This field must be at the beginning, as we do "inheritance": Some
11835 callback functions for tree-inline.c (e.g., omp_copy_decl)
11836 receive a copy_body_data pointer that is up-casted to an
11837 omp_context pointer. */
11838 copy_body_data cb;
11839 omp_context *ctx;
11842 static tree
11843 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11845 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11847 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11848 return create_tmp_var (TREE_TYPE (var));
11850 return var;
11853 static tree
11854 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11856 tree name, new_fields = NULL, type, f;
11858 type = lang_hooks.types.make_type (RECORD_TYPE);
11859 name = DECL_NAME (TYPE_NAME (orig_type));
11860 name = build_decl (gimple_location (tcctx->ctx->stmt),
11861 TYPE_DECL, name, type);
11862 TYPE_NAME (type) = name;
11864 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11866 tree new_f = copy_node (f);
11867 DECL_CONTEXT (new_f) = type;
11868 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11869 TREE_CHAIN (new_f) = new_fields;
11870 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11871 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11872 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11873 &tcctx->cb, NULL);
11874 new_fields = new_f;
11875 tcctx->cb.decl_map->put (f, new_f);
11877 TYPE_FIELDS (type) = nreverse (new_fields);
11878 layout_type (type);
11879 return type;
11882 /* Create task copyfn. */
11884 static void
11885 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
11887 struct function *child_cfun;
11888 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
11889 tree record_type, srecord_type, bind, list;
11890 bool record_needs_remap = false, srecord_needs_remap = false;
11891 splay_tree_node n;
11892 struct omp_taskcopy_context tcctx;
11893 location_t loc = gimple_location (task_stmt);
11894 size_t looptempno = 0;
11896 child_fn = gimple_omp_task_copy_fn (task_stmt);
11897 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
11898 gcc_assert (child_cfun->cfg == NULL);
11899 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
11901 /* Reset DECL_CONTEXT on function arguments. */
11902 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
11903 DECL_CONTEXT (t) = child_fn;
11905 /* Populate the function. */
11906 push_gimplify_context ();
11907 push_cfun (child_cfun);
11909 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
11910 TREE_SIDE_EFFECTS (bind) = 1;
11911 list = NULL;
11912 DECL_SAVED_TREE (child_fn) = bind;
11913 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
11915 /* Remap src and dst argument types if needed. */
11916 record_type = ctx->record_type;
11917 srecord_type = ctx->srecord_type;
11918 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
11919 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11921 record_needs_remap = true;
11922 break;
11924 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
11925 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11927 srecord_needs_remap = true;
11928 break;
11931 if (record_needs_remap || srecord_needs_remap)
11933 memset (&tcctx, '\0', sizeof (tcctx));
11934 tcctx.cb.src_fn = ctx->cb.src_fn;
11935 tcctx.cb.dst_fn = child_fn;
11936 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
11937 gcc_checking_assert (tcctx.cb.src_node);
11938 tcctx.cb.dst_node = tcctx.cb.src_node;
11939 tcctx.cb.src_cfun = ctx->cb.src_cfun;
11940 tcctx.cb.copy_decl = task_copyfn_copy_decl;
11941 tcctx.cb.eh_lp_nr = 0;
11942 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
11943 tcctx.cb.decl_map = new hash_map<tree, tree>;
11944 tcctx.ctx = ctx;
11946 if (record_needs_remap)
11947 record_type = task_copyfn_remap_type (&tcctx, record_type);
11948 if (srecord_needs_remap)
11949 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
11951 else
11952 tcctx.cb.decl_map = NULL;
11954 arg = DECL_ARGUMENTS (child_fn);
11955 TREE_TYPE (arg) = build_pointer_type (record_type);
11956 sarg = DECL_CHAIN (arg);
11957 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
11959 /* First pass: initialize temporaries used in record_type and srecord_type
11960 sizes and field offsets. */
11961 if (tcctx.cb.decl_map)
11962 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11963 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11965 tree *p;
11967 decl = OMP_CLAUSE_DECL (c);
11968 p = tcctx.cb.decl_map->get (decl);
11969 if (p == NULL)
11970 continue;
11971 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11972 sf = (tree) n->value;
11973 sf = *tcctx.cb.decl_map->get (sf);
11974 src = build_simple_mem_ref_loc (loc, sarg);
11975 src = omp_build_component_ref (src, sf);
11976 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
11977 append_to_statement_list (t, &list);
11980 /* Second pass: copy shared var pointers and copy construct non-VLA
11981 firstprivate vars. */
11982 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11983 switch (OMP_CLAUSE_CODE (c))
11985 splay_tree_key key;
11986 case OMP_CLAUSE_SHARED:
11987 decl = OMP_CLAUSE_DECL (c);
11988 key = (splay_tree_key) decl;
11989 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
11990 key = (splay_tree_key) &DECL_UID (decl);
11991 n = splay_tree_lookup (ctx->field_map, key);
11992 if (n == NULL)
11993 break;
11994 f = (tree) n->value;
11995 if (tcctx.cb.decl_map)
11996 f = *tcctx.cb.decl_map->get (f);
11997 n = splay_tree_lookup (ctx->sfield_map, key);
11998 sf = (tree) n->value;
11999 if (tcctx.cb.decl_map)
12000 sf = *tcctx.cb.decl_map->get (sf);
12001 src = build_simple_mem_ref_loc (loc, sarg);
12002 src = omp_build_component_ref (src, sf);
12003 dst = build_simple_mem_ref_loc (loc, arg);
12004 dst = omp_build_component_ref (dst, f);
12005 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12006 append_to_statement_list (t, &list);
12007 break;
12008 case OMP_CLAUSE_REDUCTION:
12009 case OMP_CLAUSE_IN_REDUCTION:
12010 decl = OMP_CLAUSE_DECL (c);
12011 if (TREE_CODE (decl) == MEM_REF)
12013 decl = TREE_OPERAND (decl, 0);
12014 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12015 decl = TREE_OPERAND (decl, 0);
12016 if (TREE_CODE (decl) == INDIRECT_REF
12017 || TREE_CODE (decl) == ADDR_EXPR)
12018 decl = TREE_OPERAND (decl, 0);
12020 key = (splay_tree_key) decl;
12021 n = splay_tree_lookup (ctx->field_map, key);
12022 if (n == NULL)
12023 break;
12024 f = (tree) n->value;
12025 if (tcctx.cb.decl_map)
12026 f = *tcctx.cb.decl_map->get (f);
12027 n = splay_tree_lookup (ctx->sfield_map, key);
12028 sf = (tree) n->value;
12029 if (tcctx.cb.decl_map)
12030 sf = *tcctx.cb.decl_map->get (sf);
12031 src = build_simple_mem_ref_loc (loc, sarg);
12032 src = omp_build_component_ref (src, sf);
12033 if (decl != OMP_CLAUSE_DECL (c)
12034 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12035 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12036 src = build_simple_mem_ref_loc (loc, src);
12037 dst = build_simple_mem_ref_loc (loc, arg);
12038 dst = omp_build_component_ref (dst, f);
12039 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12040 append_to_statement_list (t, &list);
12041 break;
12042 case OMP_CLAUSE__LOOPTEMP_:
12043 /* Fields for first two _looptemp_ clauses are initialized by
12044 GOMP_taskloop*, the rest are handled like firstprivate. */
12045 if (looptempno < 2)
12047 looptempno++;
12048 break;
12050 /* FALLTHRU */
12051 case OMP_CLAUSE__REDUCTEMP_:
12052 case OMP_CLAUSE_FIRSTPRIVATE:
12053 decl = OMP_CLAUSE_DECL (c);
12054 if (is_variable_sized (decl))
12055 break;
12056 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12057 if (n == NULL)
12058 break;
12059 f = (tree) n->value;
12060 if (tcctx.cb.decl_map)
12061 f = *tcctx.cb.decl_map->get (f);
12062 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12063 if (n != NULL)
12065 sf = (tree) n->value;
12066 if (tcctx.cb.decl_map)
12067 sf = *tcctx.cb.decl_map->get (sf);
12068 src = build_simple_mem_ref_loc (loc, sarg);
12069 src = omp_build_component_ref (src, sf);
12070 if (use_pointer_for_field (decl, NULL)
12071 || omp_privatize_by_reference (decl))
12072 src = build_simple_mem_ref_loc (loc, src);
12074 else
12075 src = decl;
12076 dst = build_simple_mem_ref_loc (loc, arg);
12077 dst = omp_build_component_ref (dst, f);
12078 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
12079 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12080 else
12082 if (ctx->allocate_map)
12083 if (tree *allocatorp = ctx->allocate_map->get (decl))
12085 tree allocator = *allocatorp;
12086 HOST_WIDE_INT ialign = 0;
12087 if (TREE_CODE (allocator) == TREE_LIST)
12089 ialign = tree_to_uhwi (TREE_VALUE (allocator));
12090 allocator = TREE_PURPOSE (allocator);
12092 if (TREE_CODE (allocator) != INTEGER_CST)
12094 n = splay_tree_lookup (ctx->sfield_map,
12095 (splay_tree_key) allocator);
12096 allocator = (tree) n->value;
12097 if (tcctx.cb.decl_map)
12098 allocator = *tcctx.cb.decl_map->get (allocator);
12099 tree a = build_simple_mem_ref_loc (loc, sarg);
12100 allocator = omp_build_component_ref (a, allocator);
12102 allocator = fold_convert (pointer_sized_int_node, allocator);
12103 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
12104 tree align = build_int_cst (size_type_node,
12105 MAX (ialign,
12106 DECL_ALIGN_UNIT (decl)));
12107 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
12108 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
12109 allocator);
12110 ptr = fold_convert (TREE_TYPE (dst), ptr);
12111 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
12112 append_to_statement_list (t, &list);
12113 dst = build_simple_mem_ref_loc (loc, dst);
12115 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12117 append_to_statement_list (t, &list);
12118 break;
12119 case OMP_CLAUSE_PRIVATE:
12120 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
12121 break;
12122 decl = OMP_CLAUSE_DECL (c);
12123 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12124 f = (tree) n->value;
12125 if (tcctx.cb.decl_map)
12126 f = *tcctx.cb.decl_map->get (f);
12127 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12128 if (n != NULL)
12130 sf = (tree) n->value;
12131 if (tcctx.cb.decl_map)
12132 sf = *tcctx.cb.decl_map->get (sf);
12133 src = build_simple_mem_ref_loc (loc, sarg);
12134 src = omp_build_component_ref (src, sf);
12135 if (use_pointer_for_field (decl, NULL))
12136 src = build_simple_mem_ref_loc (loc, src);
12138 else
12139 src = decl;
12140 dst = build_simple_mem_ref_loc (loc, arg);
12141 dst = omp_build_component_ref (dst, f);
12142 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12143 append_to_statement_list (t, &list);
12144 break;
12145 default:
12146 break;
12149 /* Last pass: handle VLA firstprivates. */
12150 if (tcctx.cb.decl_map)
12151 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12152 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12154 tree ind, ptr, df;
12156 decl = OMP_CLAUSE_DECL (c);
12157 if (!is_variable_sized (decl))
12158 continue;
12159 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12160 if (n == NULL)
12161 continue;
12162 f = (tree) n->value;
12163 f = *tcctx.cb.decl_map->get (f);
12164 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
12165 ind = DECL_VALUE_EXPR (decl);
12166 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
12167 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
12168 n = splay_tree_lookup (ctx->sfield_map,
12169 (splay_tree_key) TREE_OPERAND (ind, 0));
12170 sf = (tree) n->value;
12171 sf = *tcctx.cb.decl_map->get (sf);
12172 src = build_simple_mem_ref_loc (loc, sarg);
12173 src = omp_build_component_ref (src, sf);
12174 src = build_simple_mem_ref_loc (loc, src);
12175 dst = build_simple_mem_ref_loc (loc, arg);
12176 dst = omp_build_component_ref (dst, f);
12177 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12178 append_to_statement_list (t, &list);
12179 n = splay_tree_lookup (ctx->field_map,
12180 (splay_tree_key) TREE_OPERAND (ind, 0));
12181 df = (tree) n->value;
12182 df = *tcctx.cb.decl_map->get (df);
12183 ptr = build_simple_mem_ref_loc (loc, arg);
12184 ptr = omp_build_component_ref (ptr, df);
12185 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
12186 build_fold_addr_expr_loc (loc, dst));
12187 append_to_statement_list (t, &list);
12190 t = build1 (RETURN_EXPR, void_type_node, NULL);
12191 append_to_statement_list (t, &list);
12193 if (tcctx.cb.decl_map)
12194 delete tcctx.cb.decl_map;
12195 pop_gimplify_context (NULL);
12196 BIND_EXPR_BODY (bind) = list;
12197 pop_cfun ();
12200 static void
12201 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
12203 tree c, clauses;
12204 gimple *g;
12205 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
12207 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
12208 gcc_assert (clauses);
12209 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12210 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
12211 switch (OMP_CLAUSE_DEPEND_KIND (c))
12213 case OMP_CLAUSE_DEPEND_LAST:
12214 /* Lowering already done at gimplification. */
12215 return;
12216 case OMP_CLAUSE_DEPEND_IN:
12217 cnt[2]++;
12218 break;
12219 case OMP_CLAUSE_DEPEND_OUT:
12220 case OMP_CLAUSE_DEPEND_INOUT:
12221 cnt[0]++;
12222 break;
12223 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12224 cnt[1]++;
12225 break;
12226 case OMP_CLAUSE_DEPEND_DEPOBJ:
12227 cnt[3]++;
12228 break;
12229 case OMP_CLAUSE_DEPEND_SOURCE:
12230 case OMP_CLAUSE_DEPEND_SINK:
12231 /* FALLTHRU */
12232 default:
12233 gcc_unreachable ();
12235 if (cnt[1] || cnt[3])
12236 idx = 5;
12237 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
12238 tree type = build_array_type_nelts (ptr_type_node, total + idx);
12239 tree array = create_tmp_var (type);
12240 TREE_ADDRESSABLE (array) = 1;
12241 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12242 NULL_TREE);
12243 if (idx == 5)
12245 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12246 gimple_seq_add_stmt (iseq, g);
12247 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12248 NULL_TREE);
12250 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12251 gimple_seq_add_stmt (iseq, g);
12252 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12254 r = build4 (ARRAY_REF, ptr_type_node, array,
12255 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12256 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12257 gimple_seq_add_stmt (iseq, g);
12259 for (i = 0; i < 4; i++)
12261 if (cnt[i] == 0)
12262 continue;
12263 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12264 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12265 continue;
12266 else
12268 switch (OMP_CLAUSE_DEPEND_KIND (c))
12270 case OMP_CLAUSE_DEPEND_IN:
12271 if (i != 2)
12272 continue;
12273 break;
12274 case OMP_CLAUSE_DEPEND_OUT:
12275 case OMP_CLAUSE_DEPEND_INOUT:
12276 if (i != 0)
12277 continue;
12278 break;
12279 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12280 if (i != 1)
12281 continue;
12282 break;
12283 case OMP_CLAUSE_DEPEND_DEPOBJ:
12284 if (i != 3)
12285 continue;
12286 break;
12287 default:
12288 gcc_unreachable ();
12290 tree t = OMP_CLAUSE_DECL (c);
12291 t = fold_convert (ptr_type_node, t);
12292 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12293 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12294 NULL_TREE, NULL_TREE);
12295 g = gimple_build_assign (r, t);
12296 gimple_seq_add_stmt (iseq, g);
12299 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12300 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12301 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12302 OMP_CLAUSE_CHAIN (c) = *pclauses;
12303 *pclauses = c;
12304 tree clobber = build_clobber (type);
12305 g = gimple_build_assign (array, clobber);
12306 gimple_seq_add_stmt (oseq, g);
12309 /* Lower the OpenMP parallel or task directive in the current statement
12310 in GSI_P. CTX holds context information for the directive. */
12312 static void
12313 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12315 tree clauses;
12316 tree child_fn, t;
12317 gimple *stmt = gsi_stmt (*gsi_p);
12318 gbind *par_bind, *bind, *dep_bind = NULL;
12319 gimple_seq par_body;
12320 location_t loc = gimple_location (stmt);
12322 clauses = gimple_omp_taskreg_clauses (stmt);
12323 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12324 && gimple_omp_task_taskwait_p (stmt))
12326 par_bind = NULL;
12327 par_body = NULL;
12329 else
12331 par_bind
12332 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
12333 par_body = gimple_bind_body (par_bind);
12335 child_fn = ctx->cb.dst_fn;
12336 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
12337 && !gimple_omp_parallel_combined_p (stmt))
12339 struct walk_stmt_info wi;
12340 int ws_num = 0;
12342 memset (&wi, 0, sizeof (wi));
12343 wi.info = &ws_num;
12344 wi.val_only = true;
12345 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12346 if (ws_num == 1)
12347 gimple_omp_parallel_set_combined_p (stmt, true);
12349 gimple_seq dep_ilist = NULL;
12350 gimple_seq dep_olist = NULL;
12351 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12352 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12354 push_gimplify_context ();
12355 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12356 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
12357 &dep_ilist, &dep_olist);
12360 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12361 && gimple_omp_task_taskwait_p (stmt))
12363 if (dep_bind)
12365 gsi_replace (gsi_p, dep_bind, true);
12366 gimple_bind_add_seq (dep_bind, dep_ilist);
12367 gimple_bind_add_stmt (dep_bind, stmt);
12368 gimple_bind_add_seq (dep_bind, dep_olist);
12369 pop_gimplify_context (dep_bind);
12371 return;
12374 if (ctx->srecord_type)
12375 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
12377 gimple_seq tskred_ilist = NULL;
12378 gimple_seq tskred_olist = NULL;
12379 if ((is_task_ctx (ctx)
12380 && gimple_omp_task_taskloop_p (ctx->stmt)
12381 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
12382 OMP_CLAUSE_REDUCTION))
12383 || (is_parallel_ctx (ctx)
12384 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
12385 OMP_CLAUSE__REDUCTEMP_)))
12387 if (dep_bind == NULL)
12389 push_gimplify_context ();
12390 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12392 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
12393 : OMP_PARALLEL,
12394 gimple_omp_taskreg_clauses (ctx->stmt),
12395 &tskred_ilist, &tskred_olist);
12398 push_gimplify_context ();
12400 gimple_seq par_olist = NULL;
12401 gimple_seq par_ilist = NULL;
12402 gimple_seq par_rlist = NULL;
12403 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
12404 lower_omp (&par_body, ctx);
12405 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
12406 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
12408 /* Declare all the variables created by mapping and the variables
12409 declared in the scope of the parallel body. */
12410 record_vars_into (ctx->block_vars, child_fn);
12411 maybe_remove_omp_member_access_dummy_vars (par_bind);
12412 record_vars_into (gimple_bind_vars (par_bind), child_fn);
12414 if (ctx->record_type)
12416 ctx->sender_decl
12417 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12418 : ctx->record_type, ".omp_data_o");
12419 DECL_NAMELESS (ctx->sender_decl) = 1;
12420 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12421 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
12424 gimple_seq olist = NULL;
12425 gimple_seq ilist = NULL;
12426 lower_send_clauses (clauses, &ilist, &olist, ctx);
12427 lower_send_shared_vars (&ilist, &olist, ctx);
12429 if (ctx->record_type)
12431 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12432 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12433 clobber));
12436 /* Once all the expansions are done, sequence all the different
12437 fragments inside gimple_omp_body. */
12439 gimple_seq new_body = NULL;
12441 if (ctx->record_type)
12443 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12444 /* fixup_child_record_type might have changed receiver_decl's type. */
12445 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12446 gimple_seq_add_stmt (&new_body,
12447 gimple_build_assign (ctx->receiver_decl, t));
12450 gimple_seq_add_seq (&new_body, par_ilist);
12451 gimple_seq_add_seq (&new_body, par_body);
12452 gimple_seq_add_seq (&new_body, par_rlist);
12453 if (ctx->cancellable)
12454 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
12455 gimple_seq_add_seq (&new_body, par_olist);
12456 new_body = maybe_catch_exception (new_body);
12457 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
12458 gimple_seq_add_stmt (&new_body,
12459 gimple_build_omp_continue (integer_zero_node,
12460 integer_zero_node));
12461 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12462 gimple_omp_set_body (stmt, new_body);
12464 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
12465 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12466 else
12467 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
12468 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12469 gimple_bind_add_seq (bind, ilist);
12470 gimple_bind_add_stmt (bind, stmt);
12471 gimple_bind_add_seq (bind, olist);
12473 pop_gimplify_context (NULL);
12475 if (dep_bind)
12477 gimple_bind_add_seq (dep_bind, dep_ilist);
12478 gimple_bind_add_seq (dep_bind, tskred_ilist);
12479 gimple_bind_add_stmt (dep_bind, bind);
12480 gimple_bind_add_seq (dep_bind, tskred_olist);
12481 gimple_bind_add_seq (dep_bind, dep_olist);
12482 pop_gimplify_context (dep_bind);
12486 /* Lower the GIMPLE_OMP_TARGET in the current statement
12487 in GSI_P. CTX holds context information for the directive. */
12489 static void
12490 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12492 tree clauses;
12493 tree child_fn, t, c;
12494 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12495 gbind *tgt_bind, *bind, *dep_bind = NULL;
12496 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12497 location_t loc = gimple_location (stmt);
12498 bool offloaded, data_region;
12499 unsigned int map_cnt = 0;
12500 tree in_reduction_clauses = NULL_TREE;
12502 offloaded = is_gimple_omp_offloaded (stmt);
12503 switch (gimple_omp_target_kind (stmt))
12505 case GF_OMP_TARGET_KIND_REGION:
12506 tree *p, *q;
12507 q = &in_reduction_clauses;
12508 for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
12509 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12511 *q = *p;
12512 q = &OMP_CLAUSE_CHAIN (*q);
12513 *p = OMP_CLAUSE_CHAIN (*p);
12515 else
12516 p = &OMP_CLAUSE_CHAIN (*p);
12517 *q = NULL_TREE;
12518 *p = in_reduction_clauses;
12519 /* FALLTHRU */
12520 case GF_OMP_TARGET_KIND_UPDATE:
12521 case GF_OMP_TARGET_KIND_ENTER_DATA:
12522 case GF_OMP_TARGET_KIND_EXIT_DATA:
12523 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12524 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12525 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12526 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12527 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12528 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12529 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12530 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12531 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12532 data_region = false;
12533 break;
12534 case GF_OMP_TARGET_KIND_DATA:
12535 case GF_OMP_TARGET_KIND_OACC_DATA:
12536 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12537 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12538 data_region = true;
12539 break;
12540 default:
12541 gcc_unreachable ();
12544 clauses = gimple_omp_target_clauses (stmt);
12546 gimple_seq dep_ilist = NULL;
12547 gimple_seq dep_olist = NULL;
12548 bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
12549 if (has_depend || in_reduction_clauses)
12551 push_gimplify_context ();
12552 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12553 if (has_depend)
12554 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12555 &dep_ilist, &dep_olist);
12556 if (in_reduction_clauses)
12557 lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
12558 ctx, NULL);
12561 tgt_bind = NULL;
12562 tgt_body = NULL;
12563 if (offloaded)
12565 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12566 tgt_body = gimple_bind_body (tgt_bind);
12568 else if (data_region)
12569 tgt_body = gimple_omp_body (stmt);
12570 child_fn = ctx->cb.dst_fn;
12572 push_gimplify_context ();
12573 fplist = NULL;
12575 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12576 switch (OMP_CLAUSE_CODE (c))
12578 tree var, x;
12580 default:
12581 break;
12582 case OMP_CLAUSE_MAP:
12583 #if CHECKING_P
12584 /* First check what we're prepared to handle in the following. */
12585 switch (OMP_CLAUSE_MAP_KIND (c))
12587 case GOMP_MAP_ALLOC:
12588 case GOMP_MAP_TO:
12589 case GOMP_MAP_FROM:
12590 case GOMP_MAP_TOFROM:
12591 case GOMP_MAP_POINTER:
12592 case GOMP_MAP_TO_PSET:
12593 case GOMP_MAP_DELETE:
12594 case GOMP_MAP_RELEASE:
12595 case GOMP_MAP_ALWAYS_TO:
12596 case GOMP_MAP_ALWAYS_FROM:
12597 case GOMP_MAP_ALWAYS_TOFROM:
12598 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12599 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12600 case GOMP_MAP_STRUCT:
12601 case GOMP_MAP_ALWAYS_POINTER:
12602 case GOMP_MAP_ATTACH:
12603 case GOMP_MAP_DETACH:
12604 break;
12605 case GOMP_MAP_IF_PRESENT:
12606 case GOMP_MAP_FORCE_ALLOC:
12607 case GOMP_MAP_FORCE_TO:
12608 case GOMP_MAP_FORCE_FROM:
12609 case GOMP_MAP_FORCE_TOFROM:
12610 case GOMP_MAP_FORCE_PRESENT:
12611 case GOMP_MAP_FORCE_DEVICEPTR:
12612 case GOMP_MAP_DEVICE_RESIDENT:
12613 case GOMP_MAP_LINK:
12614 case GOMP_MAP_FORCE_DETACH:
12615 gcc_assert (is_gimple_omp_oacc (stmt));
12616 break;
12617 default:
12618 gcc_unreachable ();
12620 #endif
12621 /* FALLTHRU */
12622 case OMP_CLAUSE_TO:
12623 case OMP_CLAUSE_FROM:
12624 oacc_firstprivate:
12625 var = OMP_CLAUSE_DECL (c);
12626 if (!DECL_P (var))
12628 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12629 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12630 && (OMP_CLAUSE_MAP_KIND (c)
12631 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12632 map_cnt++;
12633 continue;
12636 if (DECL_SIZE (var)
12637 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12639 tree var2 = DECL_VALUE_EXPR (var);
12640 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12641 var2 = TREE_OPERAND (var2, 0);
12642 gcc_assert (DECL_P (var2));
12643 var = var2;
12646 if (offloaded
12647 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12648 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12649 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12651 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12653 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12654 && varpool_node::get_create (var)->offloadable)
12655 continue;
12657 tree type = build_pointer_type (TREE_TYPE (var));
12658 tree new_var = lookup_decl (var, ctx);
12659 x = create_tmp_var_raw (type, get_name (new_var));
12660 gimple_add_tmp_var (x);
12661 x = build_simple_mem_ref (x);
12662 SET_DECL_VALUE_EXPR (new_var, x);
12663 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12665 continue;
12668 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12669 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12670 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12671 && is_omp_target (stmt))
12673 gcc_assert (maybe_lookup_field (c, ctx));
12674 map_cnt++;
12675 continue;
12678 if (!maybe_lookup_field (var, ctx))
12679 continue;
12681 /* Don't remap compute constructs' reduction variables, because the
12682 intermediate result must be local to each gang. */
12683 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12684 && is_gimple_omp_oacc (ctx->stmt)
12685 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12687 x = build_receiver_ref (var, true, ctx);
12688 tree new_var = lookup_decl (var, ctx);
12690 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12691 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12692 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12693 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12694 x = build_simple_mem_ref (x);
12695 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12697 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12698 if (omp_privatize_by_reference (new_var)
12699 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12700 || DECL_BY_REFERENCE (var)))
12702 /* Create a local object to hold the instance
12703 value. */
12704 tree type = TREE_TYPE (TREE_TYPE (new_var));
12705 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12706 tree inst = create_tmp_var (type, id);
12707 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12708 x = build_fold_addr_expr (inst);
12710 gimplify_assign (new_var, x, &fplist);
12712 else if (DECL_P (new_var))
12714 SET_DECL_VALUE_EXPR (new_var, x);
12715 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12717 else
12718 gcc_unreachable ();
12720 map_cnt++;
12721 break;
12723 case OMP_CLAUSE_FIRSTPRIVATE:
12724 gcc_checking_assert (offloaded);
12725 if (is_gimple_omp_oacc (ctx->stmt))
12727 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12728 gcc_checking_assert (!is_oacc_kernels (ctx));
12729 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12730 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12732 goto oacc_firstprivate;
12734 map_cnt++;
12735 var = OMP_CLAUSE_DECL (c);
12736 if (!omp_privatize_by_reference (var)
12737 && !is_gimple_reg_type (TREE_TYPE (var)))
12739 tree new_var = lookup_decl (var, ctx);
12740 if (is_variable_sized (var))
12742 tree pvar = DECL_VALUE_EXPR (var);
12743 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12744 pvar = TREE_OPERAND (pvar, 0);
12745 gcc_assert (DECL_P (pvar));
12746 tree new_pvar = lookup_decl (pvar, ctx);
12747 x = build_fold_indirect_ref (new_pvar);
12748 TREE_THIS_NOTRAP (x) = 1;
12750 else
12751 x = build_receiver_ref (var, true, ctx);
12752 SET_DECL_VALUE_EXPR (new_var, x);
12753 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12755 break;
12757 case OMP_CLAUSE_PRIVATE:
12758 gcc_checking_assert (offloaded);
12759 if (is_gimple_omp_oacc (ctx->stmt))
12761 /* No 'private' clauses on OpenACC 'kernels'. */
12762 gcc_checking_assert (!is_oacc_kernels (ctx));
12763 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12764 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12766 break;
12768 var = OMP_CLAUSE_DECL (c);
12769 if (is_variable_sized (var))
12771 tree new_var = lookup_decl (var, ctx);
12772 tree pvar = DECL_VALUE_EXPR (var);
12773 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12774 pvar = TREE_OPERAND (pvar, 0);
12775 gcc_assert (DECL_P (pvar));
12776 tree new_pvar = lookup_decl (pvar, ctx);
12777 x = build_fold_indirect_ref (new_pvar);
12778 TREE_THIS_NOTRAP (x) = 1;
12779 SET_DECL_VALUE_EXPR (new_var, x);
12780 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12782 break;
12784 case OMP_CLAUSE_USE_DEVICE_PTR:
12785 case OMP_CLAUSE_USE_DEVICE_ADDR:
12786 case OMP_CLAUSE_IS_DEVICE_PTR:
12787 var = OMP_CLAUSE_DECL (c);
12788 map_cnt++;
12789 if (is_variable_sized (var))
12791 tree new_var = lookup_decl (var, ctx);
12792 tree pvar = DECL_VALUE_EXPR (var);
12793 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12794 pvar = TREE_OPERAND (pvar, 0);
12795 gcc_assert (DECL_P (pvar));
12796 tree new_pvar = lookup_decl (pvar, ctx);
12797 x = build_fold_indirect_ref (new_pvar);
12798 TREE_THIS_NOTRAP (x) = 1;
12799 SET_DECL_VALUE_EXPR (new_var, x);
12800 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12802 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12803 && !omp_privatize_by_reference (var)
12804 && !omp_is_allocatable_or_ptr (var)
12805 && !lang_hooks.decls.omp_array_data (var, true))
12806 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12808 tree new_var = lookup_decl (var, ctx);
12809 tree type = build_pointer_type (TREE_TYPE (var));
12810 x = create_tmp_var_raw (type, get_name (new_var));
12811 gimple_add_tmp_var (x);
12812 x = build_simple_mem_ref (x);
12813 SET_DECL_VALUE_EXPR (new_var, x);
12814 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12816 else
12818 tree new_var = lookup_decl (var, ctx);
12819 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12820 gimple_add_tmp_var (x);
12821 SET_DECL_VALUE_EXPR (new_var, x);
12822 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12824 break;
12827 if (offloaded)
12829 target_nesting_level++;
12830 lower_omp (&tgt_body, ctx);
12831 target_nesting_level--;
12833 else if (data_region)
12834 lower_omp (&tgt_body, ctx);
12836 if (offloaded)
12838 /* Declare all the variables created by mapping and the variables
12839 declared in the scope of the target body. */
12840 record_vars_into (ctx->block_vars, child_fn);
12841 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
12842 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
12845 olist = NULL;
12846 ilist = NULL;
12847 if (ctx->record_type)
12849 ctx->sender_decl
12850 = create_tmp_var (ctx->record_type, ".omp_data_arr");
12851 DECL_NAMELESS (ctx->sender_decl) = 1;
12852 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12853 t = make_tree_vec (3);
12854 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
12855 TREE_VEC_ELT (t, 1)
12856 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
12857 ".omp_data_sizes");
12858 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
12859 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
12860 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
12861 tree tkind_type = short_unsigned_type_node;
12862 int talign_shift = 8;
12863 TREE_VEC_ELT (t, 2)
12864 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
12865 ".omp_data_kinds");
12866 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
12867 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
12868 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
12869 gimple_omp_target_set_data_arg (stmt, t);
12871 vec<constructor_elt, va_gc> *vsize;
12872 vec<constructor_elt, va_gc> *vkind;
12873 vec_alloc (vsize, map_cnt);
12874 vec_alloc (vkind, map_cnt);
12875 unsigned int map_idx = 0;
12877 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12878 switch (OMP_CLAUSE_CODE (c))
12880 tree ovar, nc, s, purpose, var, x, type;
12881 unsigned int talign;
12883 default:
12884 break;
12886 case OMP_CLAUSE_MAP:
12887 case OMP_CLAUSE_TO:
12888 case OMP_CLAUSE_FROM:
12889 oacc_firstprivate_map:
12890 nc = c;
12891 ovar = OMP_CLAUSE_DECL (c);
12892 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12893 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12894 || (OMP_CLAUSE_MAP_KIND (c)
12895 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12896 break;
12897 if (!DECL_P (ovar))
12899 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12900 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
12902 nc = OMP_CLAUSE_CHAIN (c);
12903 gcc_checking_assert (OMP_CLAUSE_DECL (nc)
12904 == get_base_address (ovar));
12905 ovar = OMP_CLAUSE_DECL (nc);
12907 else
12909 tree x = build_sender_ref (ovar, ctx);
12910 tree v = ovar;
12911 if (in_reduction_clauses
12912 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12913 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
12915 v = unshare_expr (v);
12916 tree *p = &v;
12917 while (handled_component_p (*p)
12918 || TREE_CODE (*p) == INDIRECT_REF
12919 || TREE_CODE (*p) == ADDR_EXPR
12920 || TREE_CODE (*p) == MEM_REF
12921 || TREE_CODE (*p) == NON_LVALUE_EXPR)
12922 p = &TREE_OPERAND (*p, 0);
12923 tree d = *p;
12924 if (is_variable_sized (d))
12926 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
12927 d = DECL_VALUE_EXPR (d);
12928 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
12929 d = TREE_OPERAND (d, 0);
12930 gcc_assert (DECL_P (d));
12932 splay_tree_key key
12933 = (splay_tree_key) &DECL_CONTEXT (d);
12934 tree nd = (tree) splay_tree_lookup (ctx->field_map,
12935 key)->value;
12936 if (d == *p)
12937 *p = nd;
12938 else
12939 *p = build_fold_indirect_ref (nd);
12941 v = build_fold_addr_expr_with_type (v, ptr_type_node);
12942 gimplify_assign (x, v, &ilist);
12943 nc = NULL_TREE;
12946 else
12948 if (DECL_SIZE (ovar)
12949 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
12951 tree ovar2 = DECL_VALUE_EXPR (ovar);
12952 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
12953 ovar2 = TREE_OPERAND (ovar2, 0);
12954 gcc_assert (DECL_P (ovar2));
12955 ovar = ovar2;
12957 if (!maybe_lookup_field (ovar, ctx)
12958 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12959 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12960 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
12961 continue;
12964 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
12965 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
12966 talign = DECL_ALIGN_UNIT (ovar);
12968 var = NULL_TREE;
12969 if (nc)
12971 if (in_reduction_clauses
12972 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12973 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
12975 tree d = ovar;
12976 if (is_variable_sized (d))
12978 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
12979 d = DECL_VALUE_EXPR (d);
12980 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
12981 d = TREE_OPERAND (d, 0);
12982 gcc_assert (DECL_P (d));
12984 splay_tree_key key
12985 = (splay_tree_key) &DECL_CONTEXT (d);
12986 tree nd = (tree) splay_tree_lookup (ctx->field_map,
12987 key)->value;
12988 if (d == ovar)
12989 var = nd;
12990 else
12991 var = build_fold_indirect_ref (nd);
12993 else
12994 var = lookup_decl_in_outer_ctx (ovar, ctx);
12996 if (nc
12997 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12998 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12999 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
13000 && is_omp_target (stmt))
13002 x = build_sender_ref (c, ctx);
13003 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
13005 else if (nc)
13007 x = build_sender_ref (ovar, ctx);
13009 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13010 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
13011 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
13012 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
13014 gcc_assert (offloaded);
13015 tree avar
13016 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
13017 mark_addressable (avar);
13018 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
13019 talign = DECL_ALIGN_UNIT (avar);
13020 avar = build_fold_addr_expr (avar);
13021 gimplify_assign (x, avar, &ilist);
13023 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13025 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
13026 if (!omp_privatize_by_reference (var))
13028 if (is_gimple_reg (var)
13029 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13030 suppress_warning (var);
13031 var = build_fold_addr_expr (var);
13033 else
13034 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13035 gimplify_assign (x, var, &ilist);
13037 else if (is_gimple_reg (var))
13039 gcc_assert (offloaded);
13040 tree avar = create_tmp_var (TREE_TYPE (var));
13041 mark_addressable (avar);
13042 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
13043 if (GOMP_MAP_COPY_TO_P (map_kind)
13044 || map_kind == GOMP_MAP_POINTER
13045 || map_kind == GOMP_MAP_TO_PSET
13046 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13048 /* If we need to initialize a temporary
13049 with VAR because it is not addressable, and
13050 the variable hasn't been initialized yet, then
13051 we'll get a warning for the store to avar.
13052 Don't warn in that case, the mapping might
13053 be implicit. */
13054 suppress_warning (var, OPT_Wuninitialized);
13055 gimplify_assign (avar, var, &ilist);
13057 avar = build_fold_addr_expr (avar);
13058 gimplify_assign (x, avar, &ilist);
13059 if ((GOMP_MAP_COPY_FROM_P (map_kind)
13060 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13061 && !TYPE_READONLY (TREE_TYPE (var)))
13063 x = unshare_expr (x);
13064 x = build_simple_mem_ref (x);
13065 gimplify_assign (var, x, &olist);
13068 else
13070 /* While MAP is handled explicitly by the FE,
13071 for 'target update', only the identified is passed. */
13072 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
13073 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
13074 && (omp_is_allocatable_or_ptr (var)
13075 && omp_check_optional_argument (var, false)))
13076 var = build_fold_indirect_ref (var);
13077 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
13078 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
13079 || (!omp_is_allocatable_or_ptr (var)
13080 && !omp_check_optional_argument (var, false)))
13081 var = build_fold_addr_expr (var);
13082 gimplify_assign (x, var, &ilist);
13085 s = NULL_TREE;
13086 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13088 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13089 s = TREE_TYPE (ovar);
13090 if (TREE_CODE (s) == REFERENCE_TYPE
13091 || omp_check_optional_argument (ovar, false))
13092 s = TREE_TYPE (s);
13093 s = TYPE_SIZE_UNIT (s);
13095 else
13096 s = OMP_CLAUSE_SIZE (c);
13097 if (s == NULL_TREE)
13098 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13099 s = fold_convert (size_type_node, s);
13100 purpose = size_int (map_idx++);
13101 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13102 if (TREE_CODE (s) != INTEGER_CST)
13103 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13105 unsigned HOST_WIDE_INT tkind, tkind_zero;
13106 switch (OMP_CLAUSE_CODE (c))
13108 case OMP_CLAUSE_MAP:
13109 tkind = OMP_CLAUSE_MAP_KIND (c);
13110 tkind_zero = tkind;
13111 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
13112 switch (tkind)
13114 case GOMP_MAP_ALLOC:
13115 case GOMP_MAP_IF_PRESENT:
13116 case GOMP_MAP_TO:
13117 case GOMP_MAP_FROM:
13118 case GOMP_MAP_TOFROM:
13119 case GOMP_MAP_ALWAYS_TO:
13120 case GOMP_MAP_ALWAYS_FROM:
13121 case GOMP_MAP_ALWAYS_TOFROM:
13122 case GOMP_MAP_RELEASE:
13123 case GOMP_MAP_FORCE_TO:
13124 case GOMP_MAP_FORCE_FROM:
13125 case GOMP_MAP_FORCE_TOFROM:
13126 case GOMP_MAP_FORCE_PRESENT:
13127 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
13128 break;
13129 case GOMP_MAP_DELETE:
13130 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
13131 default:
13132 break;
13134 if (tkind_zero != tkind)
13136 if (integer_zerop (s))
13137 tkind = tkind_zero;
13138 else if (integer_nonzerop (s))
13139 tkind_zero = tkind;
13141 break;
13142 case OMP_CLAUSE_FIRSTPRIVATE:
13143 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13144 tkind = GOMP_MAP_TO;
13145 tkind_zero = tkind;
13146 break;
13147 case OMP_CLAUSE_TO:
13148 tkind = GOMP_MAP_TO;
13149 tkind_zero = tkind;
13150 break;
13151 case OMP_CLAUSE_FROM:
13152 tkind = GOMP_MAP_FROM;
13153 tkind_zero = tkind;
13154 break;
13155 default:
13156 gcc_unreachable ();
13158 gcc_checking_assert (tkind
13159 < (HOST_WIDE_INT_C (1U) << talign_shift));
13160 gcc_checking_assert (tkind_zero
13161 < (HOST_WIDE_INT_C (1U) << talign_shift));
13162 talign = ceil_log2 (talign);
13163 tkind |= talign << talign_shift;
13164 tkind_zero |= talign << talign_shift;
13165 gcc_checking_assert (tkind
13166 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13167 gcc_checking_assert (tkind_zero
13168 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13169 if (tkind == tkind_zero)
13170 x = build_int_cstu (tkind_type, tkind);
13171 else
13173 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
13174 x = build3 (COND_EXPR, tkind_type,
13175 fold_build2 (EQ_EXPR, boolean_type_node,
13176 unshare_expr (s), size_zero_node),
13177 build_int_cstu (tkind_type, tkind_zero),
13178 build_int_cstu (tkind_type, tkind));
13180 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
13181 if (nc && nc != c)
13182 c = nc;
13183 break;
13185 case OMP_CLAUSE_FIRSTPRIVATE:
13186 if (is_gimple_omp_oacc (ctx->stmt))
13187 goto oacc_firstprivate_map;
13188 ovar = OMP_CLAUSE_DECL (c);
13189 if (omp_privatize_by_reference (ovar))
13190 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13191 else
13192 talign = DECL_ALIGN_UNIT (ovar);
13193 var = lookup_decl_in_outer_ctx (ovar, ctx);
13194 x = build_sender_ref (ovar, ctx);
13195 tkind = GOMP_MAP_FIRSTPRIVATE;
13196 type = TREE_TYPE (ovar);
13197 if (omp_privatize_by_reference (ovar))
13198 type = TREE_TYPE (type);
13199 if ((INTEGRAL_TYPE_P (type)
13200 && TYPE_PRECISION (type) <= POINTER_SIZE)
13201 || TREE_CODE (type) == POINTER_TYPE)
13203 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13204 tree t = var;
13205 if (omp_privatize_by_reference (var))
13206 t = build_simple_mem_ref (var);
13207 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13208 suppress_warning (var);
13209 if (TREE_CODE (type) != POINTER_TYPE)
13210 t = fold_convert (pointer_sized_int_node, t);
13211 t = fold_convert (TREE_TYPE (x), t);
13212 gimplify_assign (x, t, &ilist);
13214 else if (omp_privatize_by_reference (var))
13215 gimplify_assign (x, var, &ilist);
13216 else if (is_gimple_reg (var))
13218 tree avar = create_tmp_var (TREE_TYPE (var));
13219 mark_addressable (avar);
13220 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13221 suppress_warning (var);
13222 gimplify_assign (avar, var, &ilist);
13223 avar = build_fold_addr_expr (avar);
13224 gimplify_assign (x, avar, &ilist);
13226 else
13228 var = build_fold_addr_expr (var);
13229 gimplify_assign (x, var, &ilist);
13231 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13232 s = size_int (0);
13233 else if (omp_privatize_by_reference (ovar))
13234 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13235 else
13236 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13237 s = fold_convert (size_type_node, s);
13238 purpose = size_int (map_idx++);
13239 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13240 if (TREE_CODE (s) != INTEGER_CST)
13241 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13243 gcc_checking_assert (tkind
13244 < (HOST_WIDE_INT_C (1U) << talign_shift));
13245 talign = ceil_log2 (talign);
13246 tkind |= talign << talign_shift;
13247 gcc_checking_assert (tkind
13248 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13249 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13250 build_int_cstu (tkind_type, tkind));
13251 break;
13253 case OMP_CLAUSE_USE_DEVICE_PTR:
13254 case OMP_CLAUSE_USE_DEVICE_ADDR:
13255 case OMP_CLAUSE_IS_DEVICE_PTR:
13256 ovar = OMP_CLAUSE_DECL (c);
13257 var = lookup_decl_in_outer_ctx (ovar, ctx);
13259 if (lang_hooks.decls.omp_array_data (ovar, true))
13261 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13262 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13263 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13265 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13267 tkind = GOMP_MAP_USE_DEVICE_PTR;
13268 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13270 else
13272 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13273 x = build_sender_ref (ovar, ctx);
13276 if (is_gimple_omp_oacc (ctx->stmt))
13278 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13280 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13281 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13284 type = TREE_TYPE (ovar);
13285 if (lang_hooks.decls.omp_array_data (ovar, true))
13286 var = lang_hooks.decls.omp_array_data (ovar, false);
13287 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13288 && !omp_privatize_by_reference (ovar)
13289 && !omp_is_allocatable_or_ptr (ovar))
13290 || TREE_CODE (type) == ARRAY_TYPE)
13291 var = build_fold_addr_expr (var);
13292 else
13294 if (omp_privatize_by_reference (ovar)
13295 || omp_check_optional_argument (ovar, false)
13296 || omp_is_allocatable_or_ptr (ovar))
13298 type = TREE_TYPE (type);
13299 if (POINTER_TYPE_P (type)
13300 && TREE_CODE (type) != ARRAY_TYPE
13301 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13302 && !omp_is_allocatable_or_ptr (ovar))
13303 || (omp_privatize_by_reference (ovar)
13304 && omp_is_allocatable_or_ptr (ovar))))
13305 var = build_simple_mem_ref (var);
13306 var = fold_convert (TREE_TYPE (x), var);
13309 tree present;
13310 present = omp_check_optional_argument (ovar, true);
13311 if (present)
13313 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13314 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13315 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13316 tree new_x = unshare_expr (x);
13317 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13318 fb_rvalue);
13319 gcond *cond = gimple_build_cond_from_tree (present,
13320 notnull_label,
13321 null_label);
13322 gimple_seq_add_stmt (&ilist, cond);
13323 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
13324 gimplify_assign (new_x, null_pointer_node, &ilist);
13325 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
13326 gimple_seq_add_stmt (&ilist,
13327 gimple_build_label (notnull_label));
13328 gimplify_assign (x, var, &ilist);
13329 gimple_seq_add_stmt (&ilist,
13330 gimple_build_label (opt_arg_label));
13332 else
13333 gimplify_assign (x, var, &ilist);
13334 s = size_int (0);
13335 purpose = size_int (map_idx++);
13336 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13337 gcc_checking_assert (tkind
13338 < (HOST_WIDE_INT_C (1U) << talign_shift));
13339 gcc_checking_assert (tkind
13340 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13341 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13342 build_int_cstu (tkind_type, tkind));
13343 break;
13346 gcc_assert (map_idx == map_cnt);
13348 DECL_INITIAL (TREE_VEC_ELT (t, 1))
13349 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13350 DECL_INITIAL (TREE_VEC_ELT (t, 2))
13351 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
13352 for (int i = 1; i <= 2; i++)
13353 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
13355 gimple_seq initlist = NULL;
13356 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
13357 TREE_VEC_ELT (t, i)),
13358 &initlist, true, NULL_TREE);
13359 gimple_seq_add_seq (&ilist, initlist);
13361 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
13362 gimple_seq_add_stmt (&olist,
13363 gimple_build_assign (TREE_VEC_ELT (t, i),
13364 clobber));
13366 else if (omp_maybe_offloaded_ctx (ctx->outer))
13368 tree id = get_identifier ("omp declare target");
13369 tree decl = TREE_VEC_ELT (t, i);
13370 DECL_ATTRIBUTES (decl)
13371 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13372 varpool_node *node = varpool_node::get (decl);
13373 if (node)
13375 node->offloadable = 1;
13376 if (ENABLE_OFFLOADING)
13378 g->have_offload = true;
13379 vec_safe_push (offload_vars, t);
13384 tree clobber = build_clobber (ctx->record_type);
13385 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13386 clobber));
13389 /* Once all the expansions are done, sequence all the different
13390 fragments inside gimple_omp_body. */
13392 new_body = NULL;
13394 if (offloaded
13395 && ctx->record_type)
13397 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13398 /* fixup_child_record_type might have changed receiver_decl's type. */
13399 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13400 gimple_seq_add_stmt (&new_body,
13401 gimple_build_assign (ctx->receiver_decl, t));
13403 gimple_seq_add_seq (&new_body, fplist);
13405 if (offloaded || data_region)
13407 tree prev = NULL_TREE;
13408 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13409 switch (OMP_CLAUSE_CODE (c))
13411 tree var, x;
13412 default:
13413 break;
13414 case OMP_CLAUSE_FIRSTPRIVATE:
13415 if (is_gimple_omp_oacc (ctx->stmt))
13416 break;
13417 var = OMP_CLAUSE_DECL (c);
13418 if (omp_privatize_by_reference (var)
13419 || is_gimple_reg_type (TREE_TYPE (var)))
13421 tree new_var = lookup_decl (var, ctx);
13422 tree type;
13423 type = TREE_TYPE (var);
13424 if (omp_privatize_by_reference (var))
13425 type = TREE_TYPE (type);
13426 if ((INTEGRAL_TYPE_P (type)
13427 && TYPE_PRECISION (type) <= POINTER_SIZE)
13428 || TREE_CODE (type) == POINTER_TYPE)
13430 x = build_receiver_ref (var, false, ctx);
13431 if (TREE_CODE (type) != POINTER_TYPE)
13432 x = fold_convert (pointer_sized_int_node, x);
13433 x = fold_convert (type, x);
13434 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13435 fb_rvalue);
13436 if (omp_privatize_by_reference (var))
13438 tree v = create_tmp_var_raw (type, get_name (var));
13439 gimple_add_tmp_var (v);
13440 TREE_ADDRESSABLE (v) = 1;
13441 gimple_seq_add_stmt (&new_body,
13442 gimple_build_assign (v, x));
13443 x = build_fold_addr_expr (v);
13445 gimple_seq_add_stmt (&new_body,
13446 gimple_build_assign (new_var, x));
13448 else
13450 bool by_ref = !omp_privatize_by_reference (var);
13451 x = build_receiver_ref (var, by_ref, ctx);
13452 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13453 fb_rvalue);
13454 gimple_seq_add_stmt (&new_body,
13455 gimple_build_assign (new_var, x));
13458 else if (is_variable_sized (var))
13460 tree pvar = DECL_VALUE_EXPR (var);
13461 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13462 pvar = TREE_OPERAND (pvar, 0);
13463 gcc_assert (DECL_P (pvar));
13464 tree new_var = lookup_decl (pvar, ctx);
13465 x = build_receiver_ref (var, false, ctx);
13466 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13467 gimple_seq_add_stmt (&new_body,
13468 gimple_build_assign (new_var, x));
13470 break;
13471 case OMP_CLAUSE_PRIVATE:
13472 if (is_gimple_omp_oacc (ctx->stmt))
13473 break;
13474 var = OMP_CLAUSE_DECL (c);
13475 if (omp_privatize_by_reference (var))
13477 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13478 tree new_var = lookup_decl (var, ctx);
13479 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13480 if (TREE_CONSTANT (x))
13482 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13483 get_name (var));
13484 gimple_add_tmp_var (x);
13485 TREE_ADDRESSABLE (x) = 1;
13486 x = build_fold_addr_expr_loc (clause_loc, x);
13488 else
13489 break;
13491 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13492 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13493 gimple_seq_add_stmt (&new_body,
13494 gimple_build_assign (new_var, x));
13496 break;
13497 case OMP_CLAUSE_USE_DEVICE_PTR:
13498 case OMP_CLAUSE_USE_DEVICE_ADDR:
13499 case OMP_CLAUSE_IS_DEVICE_PTR:
13500 tree new_var;
13501 gimple_seq assign_body;
13502 bool is_array_data;
13503 bool do_optional_check;
13504 assign_body = NULL;
13505 do_optional_check = false;
13506 var = OMP_CLAUSE_DECL (c);
13507 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13509 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13510 x = build_sender_ref (is_array_data
13511 ? (splay_tree_key) &DECL_NAME (var)
13512 : (splay_tree_key) &DECL_UID (var), ctx);
13513 else
13514 x = build_receiver_ref (var, false, ctx);
13516 if (is_array_data)
13518 bool is_ref = omp_privatize_by_reference (var);
13519 do_optional_check = true;
13520 /* First, we copy the descriptor data from the host; then
13521 we update its data to point to the target address. */
13522 new_var = lookup_decl (var, ctx);
13523 new_var = DECL_VALUE_EXPR (new_var);
13524 tree v = new_var;
13526 if (is_ref)
13528 var = build_fold_indirect_ref (var);
13529 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
13530 fb_rvalue);
13531 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
13532 gimple_add_tmp_var (v);
13533 TREE_ADDRESSABLE (v) = 1;
13534 gimple_seq_add_stmt (&assign_body,
13535 gimple_build_assign (v, var));
13536 tree rhs = build_fold_addr_expr (v);
13537 gimple_seq_add_stmt (&assign_body,
13538 gimple_build_assign (new_var, rhs));
13540 else
13541 gimple_seq_add_stmt (&assign_body,
13542 gimple_build_assign (new_var, var));
13544 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13545 gcc_assert (v2);
13546 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13547 gimple_seq_add_stmt (&assign_body,
13548 gimple_build_assign (v2, x));
13550 else if (is_variable_sized (var))
13552 tree pvar = DECL_VALUE_EXPR (var);
13553 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13554 pvar = TREE_OPERAND (pvar, 0);
13555 gcc_assert (DECL_P (pvar));
13556 new_var = lookup_decl (pvar, ctx);
13557 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13558 gimple_seq_add_stmt (&assign_body,
13559 gimple_build_assign (new_var, x));
13561 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13562 && !omp_privatize_by_reference (var)
13563 && !omp_is_allocatable_or_ptr (var))
13564 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13566 new_var = lookup_decl (var, ctx);
13567 new_var = DECL_VALUE_EXPR (new_var);
13568 gcc_assert (TREE_CODE (new_var) == MEM_REF);
13569 new_var = TREE_OPERAND (new_var, 0);
13570 gcc_assert (DECL_P (new_var));
13571 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13572 gimple_seq_add_stmt (&assign_body,
13573 gimple_build_assign (new_var, x));
13575 else
13577 tree type = TREE_TYPE (var);
13578 new_var = lookup_decl (var, ctx);
13579 if (omp_privatize_by_reference (var))
13581 type = TREE_TYPE (type);
13582 if (POINTER_TYPE_P (type)
13583 && TREE_CODE (type) != ARRAY_TYPE
13584 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13585 || (omp_privatize_by_reference (var)
13586 && omp_is_allocatable_or_ptr (var))))
13588 tree v = create_tmp_var_raw (type, get_name (var));
13589 gimple_add_tmp_var (v);
13590 TREE_ADDRESSABLE (v) = 1;
13591 x = fold_convert (type, x);
13592 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13593 fb_rvalue);
13594 gimple_seq_add_stmt (&assign_body,
13595 gimple_build_assign (v, x));
13596 x = build_fold_addr_expr (v);
13597 do_optional_check = true;
13600 new_var = DECL_VALUE_EXPR (new_var);
13601 x = fold_convert (TREE_TYPE (new_var), x);
13602 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13603 gimple_seq_add_stmt (&assign_body,
13604 gimple_build_assign (new_var, x));
13606 tree present;
13607 present = (do_optional_check
13608 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
13609 : NULL_TREE);
13610 if (present)
13612 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13613 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13614 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13615 glabel *null_glabel = gimple_build_label (null_label);
13616 glabel *notnull_glabel = gimple_build_label (notnull_label);
13617 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
13618 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13619 fb_rvalue);
13620 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13621 fb_rvalue);
13622 gcond *cond = gimple_build_cond_from_tree (present,
13623 notnull_label,
13624 null_label);
13625 gimple_seq_add_stmt (&new_body, cond);
13626 gimple_seq_add_stmt (&new_body, null_glabel);
13627 gimplify_assign (new_var, null_pointer_node, &new_body);
13628 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13629 gimple_seq_add_stmt (&new_body, notnull_glabel);
13630 gimple_seq_add_seq (&new_body, assign_body);
13631 gimple_seq_add_stmt (&new_body,
13632 gimple_build_label (opt_arg_label));
13634 else
13635 gimple_seq_add_seq (&new_body, assign_body);
13636 break;
13638 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13639 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13640 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13641 or references to VLAs. */
13642 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13643 switch (OMP_CLAUSE_CODE (c))
13645 tree var;
13646 default:
13647 break;
13648 case OMP_CLAUSE_MAP:
13649 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13650 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13652 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13653 poly_int64 offset = 0;
13654 gcc_assert (prev);
13655 var = OMP_CLAUSE_DECL (c);
13656 if (DECL_P (var)
13657 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
13658 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
13659 ctx))
13660 && varpool_node::get_create (var)->offloadable)
13661 break;
13662 if (TREE_CODE (var) == INDIRECT_REF
13663 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
13664 var = TREE_OPERAND (var, 0);
13665 if (TREE_CODE (var) == COMPONENT_REF)
13667 var = get_addr_base_and_unit_offset (var, &offset);
13668 gcc_assert (var != NULL_TREE && DECL_P (var));
13670 else if (DECL_SIZE (var)
13671 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
13673 tree var2 = DECL_VALUE_EXPR (var);
13674 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
13675 var2 = TREE_OPERAND (var2, 0);
13676 gcc_assert (DECL_P (var2));
13677 var = var2;
13679 tree new_var = lookup_decl (var, ctx), x;
13680 tree type = TREE_TYPE (new_var);
13681 bool is_ref;
13682 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
13683 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13684 == COMPONENT_REF))
13686 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
13687 is_ref = true;
13688 new_var = build2 (MEM_REF, type,
13689 build_fold_addr_expr (new_var),
13690 build_int_cst (build_pointer_type (type),
13691 offset));
13693 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
13695 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
13696 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
13697 new_var = build2 (MEM_REF, type,
13698 build_fold_addr_expr (new_var),
13699 build_int_cst (build_pointer_type (type),
13700 offset));
13702 else
13703 is_ref = omp_privatize_by_reference (var);
13704 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13705 is_ref = false;
13706 bool ref_to_array = false;
13707 if (is_ref)
13709 type = TREE_TYPE (type);
13710 if (TREE_CODE (type) == ARRAY_TYPE)
13712 type = build_pointer_type (type);
13713 ref_to_array = true;
13716 else if (TREE_CODE (type) == ARRAY_TYPE)
13718 tree decl2 = DECL_VALUE_EXPR (new_var);
13719 gcc_assert (TREE_CODE (decl2) == MEM_REF);
13720 decl2 = TREE_OPERAND (decl2, 0);
13721 gcc_assert (DECL_P (decl2));
13722 new_var = decl2;
13723 type = TREE_TYPE (new_var);
13725 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
13726 x = fold_convert_loc (clause_loc, type, x);
13727 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
13729 tree bias = OMP_CLAUSE_SIZE (c);
13730 if (DECL_P (bias))
13731 bias = lookup_decl (bias, ctx);
13732 bias = fold_convert_loc (clause_loc, sizetype, bias);
13733 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
13734 bias);
13735 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
13736 TREE_TYPE (x), x, bias);
13738 if (ref_to_array)
13739 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13740 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13741 if (is_ref && !ref_to_array)
13743 tree t = create_tmp_var_raw (type, get_name (var));
13744 gimple_add_tmp_var (t);
13745 TREE_ADDRESSABLE (t) = 1;
13746 gimple_seq_add_stmt (&new_body,
13747 gimple_build_assign (t, x));
13748 x = build_fold_addr_expr_loc (clause_loc, t);
13750 gimple_seq_add_stmt (&new_body,
13751 gimple_build_assign (new_var, x));
13752 prev = NULL_TREE;
13754 else if (OMP_CLAUSE_CHAIN (c)
13755 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
13756 == OMP_CLAUSE_MAP
13757 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13758 == GOMP_MAP_FIRSTPRIVATE_POINTER
13759 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13760 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13761 prev = c;
13762 break;
13763 case OMP_CLAUSE_PRIVATE:
13764 var = OMP_CLAUSE_DECL (c);
13765 if (is_variable_sized (var))
13767 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13768 tree new_var = lookup_decl (var, ctx);
13769 tree pvar = DECL_VALUE_EXPR (var);
13770 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13771 pvar = TREE_OPERAND (pvar, 0);
13772 gcc_assert (DECL_P (pvar));
13773 tree new_pvar = lookup_decl (pvar, ctx);
13774 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13775 tree al = size_int (DECL_ALIGN (var));
13776 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
13777 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13778 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
13779 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13780 gimple_seq_add_stmt (&new_body,
13781 gimple_build_assign (new_pvar, x));
13783 else if (omp_privatize_by_reference (var)
13784 && !is_gimple_omp_oacc (ctx->stmt))
13786 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13787 tree new_var = lookup_decl (var, ctx);
13788 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13789 if (TREE_CONSTANT (x))
13790 break;
13791 else
13793 tree atmp
13794 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13795 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
13796 tree al = size_int (TYPE_ALIGN (rtype));
13797 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13800 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13801 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13802 gimple_seq_add_stmt (&new_body,
13803 gimple_build_assign (new_var, x));
13805 break;
13808 gimple_seq fork_seq = NULL;
13809 gimple_seq join_seq = NULL;
13811 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
13813 /* If there are reductions on the offloaded region itself, treat
13814 them as a dummy GANG loop. */
13815 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
13817 gcall *private_marker = lower_oacc_private_marker (ctx);
13819 if (private_marker)
13820 gimple_call_set_arg (private_marker, 2, level);
13822 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
13823 false, NULL, private_marker, NULL, &fork_seq,
13824 &join_seq, ctx);
13827 gimple_seq_add_seq (&new_body, fork_seq);
13828 gimple_seq_add_seq (&new_body, tgt_body);
13829 gimple_seq_add_seq (&new_body, join_seq);
13831 if (offloaded)
13833 new_body = maybe_catch_exception (new_body);
13834 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
13836 gimple_omp_set_body (stmt, new_body);
13839 bind = gimple_build_bind (NULL, NULL,
13840 tgt_bind ? gimple_bind_block (tgt_bind)
13841 : NULL_TREE);
13842 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
13843 gimple_bind_add_seq (bind, ilist);
13844 gimple_bind_add_stmt (bind, stmt);
13845 gimple_bind_add_seq (bind, olist);
13847 pop_gimplify_context (NULL);
13849 if (dep_bind)
13851 gimple_bind_add_seq (dep_bind, dep_ilist);
13852 gimple_bind_add_stmt (dep_bind, bind);
13853 gimple_bind_add_seq (dep_bind, dep_olist);
13854 pop_gimplify_context (dep_bind);
13858 /* Expand code for an OpenMP teams directive. */
13860 static void
13861 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
13863 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
13864 push_gimplify_context ();
13866 tree block = make_node (BLOCK);
13867 gbind *bind = gimple_build_bind (NULL, NULL, block);
13868 gsi_replace (gsi_p, bind, true);
13869 gimple_seq bind_body = NULL;
13870 gimple_seq dlist = NULL;
13871 gimple_seq olist = NULL;
13873 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13874 OMP_CLAUSE_NUM_TEAMS);
13875 if (num_teams == NULL_TREE)
13876 num_teams = build_int_cst (unsigned_type_node, 0);
13877 else
13879 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
13880 num_teams = fold_convert (unsigned_type_node, num_teams);
13881 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
13883 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13884 OMP_CLAUSE_THREAD_LIMIT);
13885 if (thread_limit == NULL_TREE)
13886 thread_limit = build_int_cst (unsigned_type_node, 0);
13887 else
13889 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
13890 thread_limit = fold_convert (unsigned_type_node, thread_limit);
13891 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
13892 fb_rvalue);
13895 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
13896 &bind_body, &dlist, ctx, NULL);
13897 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
13898 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
13899 NULL, ctx);
13900 gimple_seq_add_stmt (&bind_body, teams_stmt);
13902 location_t loc = gimple_location (teams_stmt);
13903 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
13904 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
13905 gimple_set_location (call, loc);
13906 gimple_seq_add_stmt (&bind_body, call);
13908 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
13909 gimple_omp_set_body (teams_stmt, NULL);
13910 gimple_seq_add_seq (&bind_body, olist);
13911 gimple_seq_add_seq (&bind_body, dlist);
13912 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
13913 gimple_bind_set_body (bind, bind_body);
13915 pop_gimplify_context (bind);
13917 gimple_bind_append_vars (bind, ctx->block_vars);
13918 BLOCK_VARS (block) = ctx->block_vars;
13919 if (BLOCK_VARS (block))
13920 TREE_USED (block) = 1;
13923 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
13924 regimplified. If DATA is non-NULL, lower_omp_1 is outside
13925 of OMP context, but with task_shared_vars set. */
13927 static tree
13928 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
13929 void *data)
13931 tree t = *tp;
13933 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
13934 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
13935 && data == NULL
13936 && DECL_HAS_VALUE_EXPR_P (t))
13937 return t;
13939 if (task_shared_vars
13940 && DECL_P (t)
13941 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
13942 return t;
13944 /* If a global variable has been privatized, TREE_CONSTANT on
13945 ADDR_EXPR might be wrong. */
13946 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
13947 recompute_tree_invariant_for_addr_expr (t);
13949 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
13950 return NULL_TREE;
13953 /* Data to be communicated between lower_omp_regimplify_operands and
13954 lower_omp_regimplify_operands_p. */
13956 struct lower_omp_regimplify_operands_data
13958 omp_context *ctx;
13959 vec<tree> *decls;
13962 /* Helper function for lower_omp_regimplify_operands. Find
13963 omp_member_access_dummy_var vars and adjust temporarily their
13964 DECL_VALUE_EXPRs if needed. */
13966 static tree
13967 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
13968 void *data)
13970 tree t = omp_member_access_dummy_var (*tp);
13971 if (t)
13973 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
13974 lower_omp_regimplify_operands_data *ldata
13975 = (lower_omp_regimplify_operands_data *) wi->info;
13976 tree o = maybe_lookup_decl (t, ldata->ctx);
13977 if (o != t)
13979 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
13980 ldata->decls->safe_push (*tp);
13981 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
13982 SET_DECL_VALUE_EXPR (*tp, v);
13985 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
13986 return NULL_TREE;
13989 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
13990 of omp_member_access_dummy_var vars during regimplification. */
13992 static void
13993 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
13994 gimple_stmt_iterator *gsi_p)
13996 auto_vec<tree, 10> decls;
13997 if (ctx)
13999 struct walk_stmt_info wi;
14000 memset (&wi, '\0', sizeof (wi));
14001 struct lower_omp_regimplify_operands_data data;
14002 data.ctx = ctx;
14003 data.decls = &decls;
14004 wi.info = &data;
14005 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
14007 gimple_regimplify_operands (stmt, gsi_p);
14008 while (!decls.is_empty ())
14010 tree t = decls.pop ();
14011 tree v = decls.pop ();
14012 SET_DECL_VALUE_EXPR (t, v);
14016 static void
14017 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14019 gimple *stmt = gsi_stmt (*gsi_p);
14020 struct walk_stmt_info wi;
14021 gcall *call_stmt;
14023 if (gimple_has_location (stmt))
14024 input_location = gimple_location (stmt);
14026 if (task_shared_vars)
14027 memset (&wi, '\0', sizeof (wi));
14029 /* If we have issued syntax errors, avoid doing any heavy lifting.
14030 Just replace the OMP directives with a NOP to avoid
14031 confusing RTL expansion. */
14032 if (seen_error () && is_gimple_omp (stmt))
14034 gsi_replace (gsi_p, gimple_build_nop (), true);
14035 return;
14038 switch (gimple_code (stmt))
14040 case GIMPLE_COND:
14042 gcond *cond_stmt = as_a <gcond *> (stmt);
14043 if ((ctx || task_shared_vars)
14044 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
14045 lower_omp_regimplify_p,
14046 ctx ? NULL : &wi, NULL)
14047 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
14048 lower_omp_regimplify_p,
14049 ctx ? NULL : &wi, NULL)))
14050 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
14052 break;
14053 case GIMPLE_CATCH:
14054 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
14055 break;
14056 case GIMPLE_EH_FILTER:
14057 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
14058 break;
14059 case GIMPLE_TRY:
14060 lower_omp (gimple_try_eval_ptr (stmt), ctx);
14061 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
14062 break;
14063 case GIMPLE_TRANSACTION:
14064 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
14065 ctx);
14066 break;
14067 case GIMPLE_BIND:
14068 if (ctx && is_gimple_omp_oacc (ctx->stmt))
14070 tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
14071 oacc_privatization_scan_decl_chain (ctx, vars);
14073 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
14074 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
14075 break;
14076 case GIMPLE_OMP_PARALLEL:
14077 case GIMPLE_OMP_TASK:
14078 ctx = maybe_lookup_ctx (stmt);
14079 gcc_assert (ctx);
14080 if (ctx->cancellable)
14081 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14082 lower_omp_taskreg (gsi_p, ctx);
14083 break;
14084 case GIMPLE_OMP_FOR:
14085 ctx = maybe_lookup_ctx (stmt);
14086 gcc_assert (ctx);
14087 if (ctx->cancellable)
14088 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14089 lower_omp_for (gsi_p, ctx);
14090 break;
14091 case GIMPLE_OMP_SECTIONS:
14092 ctx = maybe_lookup_ctx (stmt);
14093 gcc_assert (ctx);
14094 if (ctx->cancellable)
14095 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14096 lower_omp_sections (gsi_p, ctx);
14097 break;
14098 case GIMPLE_OMP_SCOPE:
14099 ctx = maybe_lookup_ctx (stmt);
14100 gcc_assert (ctx);
14101 lower_omp_scope (gsi_p, ctx);
14102 break;
14103 case GIMPLE_OMP_SINGLE:
14104 ctx = maybe_lookup_ctx (stmt);
14105 gcc_assert (ctx);
14106 lower_omp_single (gsi_p, ctx);
14107 break;
14108 case GIMPLE_OMP_MASTER:
14109 case GIMPLE_OMP_MASKED:
14110 ctx = maybe_lookup_ctx (stmt);
14111 gcc_assert (ctx);
14112 lower_omp_master (gsi_p, ctx);
14113 break;
14114 case GIMPLE_OMP_TASKGROUP:
14115 ctx = maybe_lookup_ctx (stmt);
14116 gcc_assert (ctx);
14117 lower_omp_taskgroup (gsi_p, ctx);
14118 break;
14119 case GIMPLE_OMP_ORDERED:
14120 ctx = maybe_lookup_ctx (stmt);
14121 gcc_assert (ctx);
14122 lower_omp_ordered (gsi_p, ctx);
14123 break;
14124 case GIMPLE_OMP_SCAN:
14125 ctx = maybe_lookup_ctx (stmt);
14126 gcc_assert (ctx);
14127 lower_omp_scan (gsi_p, ctx);
14128 break;
14129 case GIMPLE_OMP_CRITICAL:
14130 ctx = maybe_lookup_ctx (stmt);
14131 gcc_assert (ctx);
14132 lower_omp_critical (gsi_p, ctx);
14133 break;
14134 case GIMPLE_OMP_ATOMIC_LOAD:
14135 if ((ctx || task_shared_vars)
14136 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14137 as_a <gomp_atomic_load *> (stmt)),
14138 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
14139 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14140 break;
14141 case GIMPLE_OMP_TARGET:
14142 ctx = maybe_lookup_ctx (stmt);
14143 gcc_assert (ctx);
14144 lower_omp_target (gsi_p, ctx);
14145 break;
14146 case GIMPLE_OMP_TEAMS:
14147 ctx = maybe_lookup_ctx (stmt);
14148 gcc_assert (ctx);
14149 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
14150 lower_omp_taskreg (gsi_p, ctx);
14151 else
14152 lower_omp_teams (gsi_p, ctx);
14153 break;
14154 case GIMPLE_CALL:
14155 tree fndecl;
14156 call_stmt = as_a <gcall *> (stmt);
14157 fndecl = gimple_call_fndecl (call_stmt);
14158 if (fndecl
14159 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14160 switch (DECL_FUNCTION_CODE (fndecl))
14162 case BUILT_IN_GOMP_BARRIER:
14163 if (ctx == NULL)
14164 break;
14165 /* FALLTHRU */
14166 case BUILT_IN_GOMP_CANCEL:
14167 case BUILT_IN_GOMP_CANCELLATION_POINT:
14168 omp_context *cctx;
14169 cctx = ctx;
14170 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
14171 cctx = cctx->outer;
14172 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
14173 if (!cctx->cancellable)
14175 if (DECL_FUNCTION_CODE (fndecl)
14176 == BUILT_IN_GOMP_CANCELLATION_POINT)
14178 stmt = gimple_build_nop ();
14179 gsi_replace (gsi_p, stmt, false);
14181 break;
14183 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
14185 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
14186 gimple_call_set_fndecl (call_stmt, fndecl);
14187 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
14189 tree lhs;
14190 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
14191 gimple_call_set_lhs (call_stmt, lhs);
14192 tree fallthru_label;
14193 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
14194 gimple *g;
14195 g = gimple_build_label (fallthru_label);
14196 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14197 g = gimple_build_cond (NE_EXPR, lhs,
14198 fold_convert (TREE_TYPE (lhs),
14199 boolean_false_node),
14200 cctx->cancel_label, fallthru_label);
14201 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14202 break;
14203 default:
14204 break;
14206 goto regimplify;
14208 case GIMPLE_ASSIGN:
14209 for (omp_context *up = ctx; up; up = up->outer)
14211 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
14212 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
14213 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
14214 || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
14215 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
14216 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
14217 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
14218 && (gimple_omp_target_kind (up->stmt)
14219 == GF_OMP_TARGET_KIND_DATA)))
14220 continue;
14221 else if (!up->lastprivate_conditional_map)
14222 break;
14223 tree lhs = get_base_address (gimple_assign_lhs (stmt));
14224 if (TREE_CODE (lhs) == MEM_REF
14225 && DECL_P (TREE_OPERAND (lhs, 0))
14226 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
14227 0))) == REFERENCE_TYPE)
14228 lhs = TREE_OPERAND (lhs, 0);
14229 if (DECL_P (lhs))
14230 if (tree *v = up->lastprivate_conditional_map->get (lhs))
14232 tree clauses;
14233 if (up->combined_into_simd_safelen1)
14235 up = up->outer;
14236 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
14237 up = up->outer;
14239 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
14240 clauses = gimple_omp_for_clauses (up->stmt);
14241 else
14242 clauses = gimple_omp_sections_clauses (up->stmt);
14243 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
14244 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
14245 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14246 OMP_CLAUSE__CONDTEMP_);
14247 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
14248 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
14249 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14252 /* FALLTHRU */
14254 default:
14255 regimplify:
14256 if ((ctx || task_shared_vars)
14257 && walk_gimple_op (stmt, lower_omp_regimplify_p,
14258 ctx ? NULL : &wi))
14260 /* Just remove clobbers, this should happen only if we have
14261 "privatized" local addressable variables in SIMD regions,
14262 the clobber isn't needed in that case and gimplifying address
14263 of the ARRAY_REF into a pointer and creating MEM_REF based
14264 clobber would create worse code than we get with the clobber
14265 dropped. */
14266 if (gimple_clobber_p (stmt))
14268 gsi_replace (gsi_p, gimple_build_nop (), true);
14269 break;
14271 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14273 break;
14277 static void
14278 lower_omp (gimple_seq *body, omp_context *ctx)
14280 location_t saved_location = input_location;
14281 gimple_stmt_iterator gsi;
14282 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14283 lower_omp_1 (&gsi, ctx);
14284 /* During gimplification, we haven't folded statments inside offloading
14285 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
14286 if (target_nesting_level || taskreg_nesting_level)
14287 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14288 fold_stmt (&gsi);
14289 input_location = saved_location;
14292 /* Main entry point. */
14294 static unsigned int
14295 execute_lower_omp (void)
14297 gimple_seq body;
14298 int i;
14299 omp_context *ctx;
14301 /* This pass always runs, to provide PROP_gimple_lomp.
14302 But often, there is nothing to do. */
14303 if (flag_openacc == 0 && flag_openmp == 0
14304 && flag_openmp_simd == 0)
14305 return 0;
14307 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
14308 delete_omp_context);
14310 body = gimple_body (current_function_decl);
14312 scan_omp (&body, NULL);
14313 gcc_assert (taskreg_nesting_level == 0);
14314 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
14315 finish_taskreg_scan (ctx);
14316 taskreg_contexts.release ();
14318 if (all_contexts->root)
14320 if (task_shared_vars)
14321 push_gimplify_context ();
14322 lower_omp (&body, NULL);
14323 if (task_shared_vars)
14324 pop_gimplify_context (NULL);
14327 if (all_contexts)
14329 splay_tree_delete (all_contexts);
14330 all_contexts = NULL;
14332 BITMAP_FREE (task_shared_vars);
14333 BITMAP_FREE (global_nonaddressable_vars);
14335 /* If current function is a method, remove artificial dummy VAR_DECL created
14336 for non-static data member privatization, they aren't needed for
14337 debuginfo nor anything else, have been already replaced everywhere in the
14338 IL and cause problems with LTO. */
14339 if (DECL_ARGUMENTS (current_function_decl)
14340 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
14341 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
14342 == POINTER_TYPE))
14343 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
14344 return 0;
14347 namespace {
14349 const pass_data pass_data_lower_omp =
14351 GIMPLE_PASS, /* type */
14352 "omplower", /* name */
14353 OPTGROUP_OMP, /* optinfo_flags */
14354 TV_NONE, /* tv_id */
14355 PROP_gimple_any, /* properties_required */
14356 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
14357 0, /* properties_destroyed */
14358 0, /* todo_flags_start */
14359 0, /* todo_flags_finish */
14362 class pass_lower_omp : public gimple_opt_pass
14364 public:
14365 pass_lower_omp (gcc::context *ctxt)
14366 : gimple_opt_pass (pass_data_lower_omp, ctxt)
14369 /* opt_pass methods: */
14370 virtual unsigned int execute (function *) { return execute_lower_omp (); }
14372 }; // class pass_lower_omp
14374 } // anon namespace
14376 gimple_opt_pass *
14377 make_pass_lower_omp (gcc::context *ctxt)
14379 return new pass_lower_omp (ctxt);
14382 /* The following is a utility to diagnose structured block violations.
14383 It is not part of the "omplower" pass, as that's invoked too late. It
14384 should be invoked by the respective front ends after gimplification. */
14386 static splay_tree all_labels;
14388 /* Check for mismatched contexts and generate an error if needed. Return
14389 true if an error is detected. */
14391 static bool
14392 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
14393 gimple *branch_ctx, gimple *label_ctx)
14395 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
14396 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
14398 if (label_ctx == branch_ctx)
14399 return false;
14401 const char* kind = NULL;
14403 if (flag_openacc)
14405 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
14406 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
14408 gcc_checking_assert (kind == NULL);
14409 kind = "OpenACC";
14412 if (kind == NULL)
14414 gcc_checking_assert (flag_openmp || flag_openmp_simd);
14415 kind = "OpenMP";
14418 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14419 so we could traverse it and issue a correct "exit" or "enter" error
14420 message upon a structured block violation.
14422 We built the context by building a list with tree_cons'ing, but there is
14423 no easy counterpart in gimple tuples. It seems like far too much work
14424 for issuing exit/enter error messages. If someone really misses the
14425 distinct error message... patches welcome. */
14427 #if 0
14428 /* Try to avoid confusing the user by producing and error message
14429 with correct "exit" or "enter" verbiage. We prefer "exit"
14430 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14431 if (branch_ctx == NULL)
14432 exit_p = false;
14433 else
14435 while (label_ctx)
14437 if (TREE_VALUE (label_ctx) == branch_ctx)
14439 exit_p = false;
14440 break;
14442 label_ctx = TREE_CHAIN (label_ctx);
14446 if (exit_p)
14447 error ("invalid exit from %s structured block", kind);
14448 else
14449 error ("invalid entry to %s structured block", kind);
14450 #endif
14452 /* If it's obvious we have an invalid entry, be specific about the error. */
14453 if (branch_ctx == NULL)
14454 error ("invalid entry to %s structured block", kind);
14455 else
14457 /* Otherwise, be vague and lazy, but efficient. */
14458 error ("invalid branch to/from %s structured block", kind);
14461 gsi_replace (gsi_p, gimple_build_nop (), false);
14462 return true;
14465 /* Pass 1: Create a minimal tree of structured blocks, and record
14466 where each label is found. */
14468 static tree
14469 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14470 struct walk_stmt_info *wi)
14472 gimple *context = (gimple *) wi->info;
14473 gimple *inner_context;
14474 gimple *stmt = gsi_stmt (*gsi_p);
14476 *handled_ops_p = true;
14478 switch (gimple_code (stmt))
14480 WALK_SUBSTMTS;
14482 case GIMPLE_OMP_PARALLEL:
14483 case GIMPLE_OMP_TASK:
14484 case GIMPLE_OMP_SCOPE:
14485 case GIMPLE_OMP_SECTIONS:
14486 case GIMPLE_OMP_SINGLE:
14487 case GIMPLE_OMP_SECTION:
14488 case GIMPLE_OMP_MASTER:
14489 case GIMPLE_OMP_MASKED:
14490 case GIMPLE_OMP_ORDERED:
14491 case GIMPLE_OMP_SCAN:
14492 case GIMPLE_OMP_CRITICAL:
14493 case GIMPLE_OMP_TARGET:
14494 case GIMPLE_OMP_TEAMS:
14495 case GIMPLE_OMP_TASKGROUP:
14496 /* The minimal context here is just the current OMP construct. */
14497 inner_context = stmt;
14498 wi->info = inner_context;
14499 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14500 wi->info = context;
14501 break;
14503 case GIMPLE_OMP_FOR:
14504 inner_context = stmt;
14505 wi->info = inner_context;
14506 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14507 walk them. */
14508 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
14509 diagnose_sb_1, NULL, wi);
14510 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14511 wi->info = context;
14512 break;
14514 case GIMPLE_LABEL:
14515 splay_tree_insert (all_labels,
14516 (splay_tree_key) gimple_label_label (
14517 as_a <glabel *> (stmt)),
14518 (splay_tree_value) context);
14519 break;
14521 default:
14522 break;
14525 return NULL_TREE;
14528 /* Pass 2: Check each branch and see if its context differs from that of
14529 the destination label's context. */
14531 static tree
14532 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14533 struct walk_stmt_info *wi)
14535 gimple *context = (gimple *) wi->info;
14536 splay_tree_node n;
14537 gimple *stmt = gsi_stmt (*gsi_p);
14539 *handled_ops_p = true;
14541 switch (gimple_code (stmt))
14543 WALK_SUBSTMTS;
14545 case GIMPLE_OMP_PARALLEL:
14546 case GIMPLE_OMP_TASK:
14547 case GIMPLE_OMP_SCOPE:
14548 case GIMPLE_OMP_SECTIONS:
14549 case GIMPLE_OMP_SINGLE:
14550 case GIMPLE_OMP_SECTION:
14551 case GIMPLE_OMP_MASTER:
14552 case GIMPLE_OMP_MASKED:
14553 case GIMPLE_OMP_ORDERED:
14554 case GIMPLE_OMP_SCAN:
14555 case GIMPLE_OMP_CRITICAL:
14556 case GIMPLE_OMP_TARGET:
14557 case GIMPLE_OMP_TEAMS:
14558 case GIMPLE_OMP_TASKGROUP:
14559 wi->info = stmt;
14560 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14561 wi->info = context;
14562 break;
14564 case GIMPLE_OMP_FOR:
14565 wi->info = stmt;
14566 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14567 walk them. */
14568 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
14569 diagnose_sb_2, NULL, wi);
14570 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14571 wi->info = context;
14572 break;
14574 case GIMPLE_COND:
14576 gcond *cond_stmt = as_a <gcond *> (stmt);
14577 tree lab = gimple_cond_true_label (cond_stmt);
14578 if (lab)
14580 n = splay_tree_lookup (all_labels,
14581 (splay_tree_key) lab);
14582 diagnose_sb_0 (gsi_p, context,
14583 n ? (gimple *) n->value : NULL);
14585 lab = gimple_cond_false_label (cond_stmt);
14586 if (lab)
14588 n = splay_tree_lookup (all_labels,
14589 (splay_tree_key) lab);
14590 diagnose_sb_0 (gsi_p, context,
14591 n ? (gimple *) n->value : NULL);
14594 break;
14596 case GIMPLE_GOTO:
14598 tree lab = gimple_goto_dest (stmt);
14599 if (TREE_CODE (lab) != LABEL_DECL)
14600 break;
14602 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14603 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
14605 break;
14607 case GIMPLE_SWITCH:
14609 gswitch *switch_stmt = as_a <gswitch *> (stmt);
14610 unsigned int i;
14611 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
14613 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
14614 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14615 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
14616 break;
14619 break;
14621 case GIMPLE_RETURN:
14622 diagnose_sb_0 (gsi_p, context, NULL);
14623 break;
14625 default:
14626 break;
14629 return NULL_TREE;
14632 static unsigned int
14633 diagnose_omp_structured_block_errors (void)
14635 struct walk_stmt_info wi;
14636 gimple_seq body = gimple_body (current_function_decl);
14638 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
14640 memset (&wi, 0, sizeof (wi));
14641 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
14643 memset (&wi, 0, sizeof (wi));
14644 wi.want_locations = true;
14645 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
14647 gimple_set_body (current_function_decl, body);
14649 splay_tree_delete (all_labels);
14650 all_labels = NULL;
14652 return 0;
14655 namespace {
14657 const pass_data pass_data_diagnose_omp_blocks =
14659 GIMPLE_PASS, /* type */
14660 "*diagnose_omp_blocks", /* name */
14661 OPTGROUP_OMP, /* optinfo_flags */
14662 TV_NONE, /* tv_id */
14663 PROP_gimple_any, /* properties_required */
14664 0, /* properties_provided */
14665 0, /* properties_destroyed */
14666 0, /* todo_flags_start */
14667 0, /* todo_flags_finish */
14670 class pass_diagnose_omp_blocks : public gimple_opt_pass
14672 public:
14673 pass_diagnose_omp_blocks (gcc::context *ctxt)
14674 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
14677 /* opt_pass methods: */
14678 virtual bool gate (function *)
14680 return flag_openacc || flag_openmp || flag_openmp_simd;
14682 virtual unsigned int execute (function *)
14684 return diagnose_omp_structured_block_errors ();
14687 }; // class pass_diagnose_omp_blocks
14689 } // anon namespace
14691 gimple_opt_pass *
14692 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
14694 return new pass_diagnose_omp_blocks (ctxt);
14698 #include "gt-omp-low.h"