ada: Fix wrong resolution for hidden discriminant in predicate
[official-gcc.git] / gcc / omp-low.cc
blobb882df048ef20df77c785b35ee8f2fffe3224e2f
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2023 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
42 #include "gimplify.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "omp-offload.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.cc (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;

  /* Only used for omp target contexts.  True if a teams construct is
     strictly nested in it.  */
  bool teams_nested_p;

  /* Only used for omp target contexts.  True if an OpenMP construct other
     than teams is strictly nested in it.  */
  bool nonteams_nested_p;

  /* Candidates for adjusting OpenACC privatization level.  */
  vec<tree> oacc_privatization_candidates;
};
/* Map from each OMP statement to the omp_context created for it; owns the
   contexts (see delete_omp_context).  */
static splay_tree all_contexts;
/* Current nesting depth of parallel/task regions while scanning.  */
static int taskreg_nesting_level;
/* Current nesting depth of target regions while scanning.  */
static int target_nesting_level;
/* DECL_UIDs of variables forced addressable during this pass (see
   use_pointer_for_field); their privatized copies need not be.  */
static bitmap make_addressable_vars;
/* DECL_UIDs of globals first seen non-addressable; answer is kept stable
   for the whole pass even if they are made addressable later (PR91216).  */
static bitmap global_nonaddressable_vars;
/* NOTE(review): collected contexts / task statements processed later in the
   pass — exact consumers are outside this chunk; verify before documenting
   further.  */
static vec<omp_context *> taskreg_contexts;
static vec<gomp_task *> task_cpyfns;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
static bool omp_maybe_offloaded_ctx (omp_context *ctx);

/* Shared switch cases for walk_gimple_stmt callbacks: container statements
   whose sub-statements should be walked.  */
#define WALK_SUBSTMTS \
  case GIMPLE_BIND: \
  case GIMPLE_TRY: \
  case GIMPLE_CATCH: \
  case GIMPLE_EH_FILTER: \
  case GIMPLE_ASSUME: \
  case GIMPLE_TRANSACTION: \
    /* The sub-statements for these should be walked.  */ \
    *handled_ops_p = false; \
    break;
211 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
212 (This doesn't include OpenACC 'kernels' decomposed parts.) */
214 static bool
215 is_oacc_parallel_or_serial (omp_context *ctx)
217 enum gimple_code outer_type = gimple_code (ctx->stmt);
218 return ((outer_type == GIMPLE_OMP_TARGET)
219 && ((gimple_omp_target_kind (ctx->stmt)
220 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
221 || (gimple_omp_target_kind (ctx->stmt)
222 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
225 /* Return whether CTX represents an OpenACC 'kernels' construct.
226 (This doesn't include OpenACC 'kernels' decomposed parts.) */
228 static bool
229 is_oacc_kernels (omp_context *ctx)
231 enum gimple_code outer_type = gimple_code (ctx->stmt);
232 return ((outer_type == GIMPLE_OMP_TARGET)
233 && (gimple_omp_target_kind (ctx->stmt)
234 == GF_OMP_TARGET_KIND_OACC_KERNELS));
237 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
239 static bool
240 is_oacc_kernels_decomposed_part (omp_context *ctx)
242 enum gimple_code outer_type = gimple_code (ctx->stmt);
243 return ((outer_type == GIMPLE_OMP_TARGET)
244 && ((gimple_omp_target_kind (ctx->stmt)
245 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
246 || (gimple_omp_target_kind (ctx->stmt)
247 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
248 || (gimple_omp_target_kind (ctx->stmt)
249 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
252 /* Return true if STMT corresponds to an OpenMP target region. */
253 static bool
254 is_omp_target (gimple *stmt)
256 if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
258 int kind = gimple_omp_target_kind (stmt);
259 return (kind == GF_OMP_TARGET_KIND_REGION
260 || kind == GF_OMP_TARGET_KIND_DATA
261 || kind == GF_OMP_TARGET_KIND_ENTER_DATA
262 || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
264 return false;
267 /* If DECL is the artificial dummy VAR_DECL created for non-static
268 data member privatization, return the underlying "this" parameter,
269 otherwise return NULL. */
271 tree
272 omp_member_access_dummy_var (tree decl)
274 if (!VAR_P (decl)
275 || !DECL_ARTIFICIAL (decl)
276 || !DECL_IGNORED_P (decl)
277 || !DECL_HAS_VALUE_EXPR_P (decl)
278 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
279 return NULL_TREE;
281 tree v = DECL_VALUE_EXPR (decl);
282 if (TREE_CODE (v) != COMPONENT_REF)
283 return NULL_TREE;
285 while (1)
286 switch (TREE_CODE (v))
288 case COMPONENT_REF:
289 case MEM_REF:
290 case INDIRECT_REF:
291 CASE_CONVERT:
292 case POINTER_PLUS_EXPR:
293 v = TREE_OPERAND (v, 0);
294 continue;
295 case PARM_DECL:
296 if (DECL_CONTEXT (v) == current_function_decl
297 && DECL_ARTIFICIAL (v)
298 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
299 return v;
300 return NULL_TREE;
301 default:
302 return NULL_TREE;
306 /* Helper for unshare_and_remap, called through walk_tree. */
308 static tree
309 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
311 tree *pair = (tree *) data;
312 if (*tp == pair[0])
314 *tp = unshare_expr (pair[1]);
315 *walk_subtrees = 0;
317 else if (IS_TYPE_OR_DECL_P (*tp))
318 *walk_subtrees = 0;
319 return NULL_TREE;
322 /* Return unshare_expr (X) with all occurrences of FROM
323 replaced with TO. */
325 static tree
326 unshare_and_remap (tree x, tree from, tree to)
328 tree pair[2] = { from, to };
329 x = unshare_expr (x);
330 walk_tree (&x, unshare_and_remap_1, pair, NULL);
331 return x;
334 /* Convenience function for calling scan_omp_1_op on tree operands. */
336 static inline tree
337 scan_omp_op (tree *tp, omp_context *ctx)
339 struct walk_stmt_info wi;
341 memset (&wi, 0, sizeof (wi));
342 wi.info = ctx;
343 wi.want_locations = true;
345 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
348 static void lower_omp (gimple_seq *, omp_context *);
349 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
350 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
352 /* Return true if CTX is for an omp parallel. */
354 static inline bool
355 is_parallel_ctx (omp_context *ctx)
357 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
361 /* Return true if CTX is for an omp task. */
363 static inline bool
364 is_task_ctx (omp_context *ctx)
366 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
370 /* Return true if CTX is for an omp taskloop. */
372 static inline bool
373 is_taskloop_ctx (omp_context *ctx)
375 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
376 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
380 /* Return true if CTX is for a host omp teams. */
382 static inline bool
383 is_host_teams_ctx (omp_context *ctx)
385 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
386 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
389 /* Return true if CTX is for an omp parallel or omp task or host omp teams
390 (the last one is strictly not a task region in OpenMP speak, but we
391 need to treat it similarly). */
393 static inline bool
394 is_taskreg_ctx (omp_context *ctx)
396 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
399 /* Return true if EXPR is variable sized. */
401 static inline bool
402 is_variable_sized (const_tree expr)
404 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
407 /* Lookup variables. The "maybe" form
408 allows for the variable form to not have been entered, otherwise we
409 assert that the variable must have been entered. */
411 static inline tree
412 lookup_decl (tree var, omp_context *ctx)
414 tree *n = ctx->cb.decl_map->get (var);
415 return *n;
418 static inline tree
419 maybe_lookup_decl (const_tree var, omp_context *ctx)
421 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
422 return n ? *n : NULL_TREE;
425 static inline tree
426 lookup_field (tree var, omp_context *ctx)
428 splay_tree_node n;
429 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
430 return (tree) n->value;
433 static inline tree
434 lookup_sfield (splay_tree_key key, omp_context *ctx)
436 splay_tree_node n;
437 n = splay_tree_lookup (ctx->sfield_map
438 ? ctx->sfield_map : ctx->field_map, key);
439 return (tree) n->value;
442 static inline tree
443 lookup_sfield (tree var, omp_context *ctx)
445 return lookup_sfield ((splay_tree_key) var, ctx);
448 static inline tree
449 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
451 splay_tree_node n;
452 n = splay_tree_lookup (ctx->field_map, key);
453 return n ? (tree) n->value : NULL_TREE;
456 static inline tree
457 maybe_lookup_field (tree var, omp_context *ctx)
459 return maybe_lookup_field ((splay_tree_key) var, ctx);
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  Returning false means
   copy-in/copy-out (by value) is usable.  As a side effect this may mark
   the outer variable addressable and record it in make_addressable_vars
   (see the maybe_mark_addressable_and_ret label below).  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  /* Aggregates and atomics are always passed by reference.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  /* Find the closest enclosing taskreg or offloaded target
	     context that has a mapping for DECL.  */
	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      /* Check whether DECL is actually mapped/shared on that
		 enclosing construct's clauses.  */
	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!make_addressable_vars)
		make_addressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (make_addressable_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
589 /* Construct a new automatic decl similar to VAR. */
591 static tree
592 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
594 tree copy = copy_var_decl (var, name, type);
596 DECL_CONTEXT (copy) = current_function_decl;
598 if (ctx)
600 DECL_CHAIN (copy) = ctx->block_vars;
601 ctx->block_vars = copy;
603 else
604 record_vars (copy);
606 /* If VAR is listed in make_addressable_vars, it wasn't
607 originally addressable, but was only later made so.
608 We don't need to take address of privatizations
609 from that var. */
610 if (TREE_ADDRESSABLE (var)
611 && ((make_addressable_vars
612 && bitmap_bit_p (make_addressable_vars, DECL_UID (var)))
613 || (global_nonaddressable_vars
614 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
615 TREE_ADDRESSABLE (copy) = 0;
617 return copy;
620 static tree
621 omp_copy_decl_1 (tree var, omp_context *ctx)
623 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
626 /* Build tree nodes to access the field for VAR on the receiver side. */
628 static tree
629 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
631 tree x, field = lookup_field (var, ctx);
633 /* If the receiver record type was remapped in the child function,
634 remap the field into the new record type. */
635 x = maybe_lookup_field (field, ctx);
636 if (x != NULL)
637 field = x;
639 x = build_simple_mem_ref (ctx->receiver_decl);
640 TREE_THIS_NOTRAP (x) = 1;
641 x = omp_build_component_ref (x, field);
642 if (by_ref)
644 x = build_simple_mem_ref (x);
645 TREE_THIS_NOTRAP (x) = 1;
648 return x;
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  CODE, when given, is the clause code for whose
   lowering the outer reference is being built, and selects among the
   special cases below.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  /* Skip contexts that do not hold data-sharing information for VAR:
     taskgroups always, scopes only when VAR isn't mapped there.  */
  for (; outer; outer = outer->outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
	continue;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
	  && !maybe_lookup_decl (var, outer))
	continue;
      break;
    }

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    /* Globals are referenced directly.  */
    x = var;
  else if (is_variable_sized (var))
    {
      /* Variable-sized vars live behind a pointer recorded in their
	 DECL_VALUE_EXPR; recurse on that pointer and dereference.  */
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || code == OMP_CLAUSE_ALLOCATE
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      /* Taskloop keys its outer field map by &DECL_UID (see
	 install_var_field's mask & 8 case).  */
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_privatize_by_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      /* For member-access dummies, use the (possibly remapped)
	 DECL_VALUE_EXPR instead of the dummy itself.  */
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_privatize_by_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
764 /* Build tree nodes to access the field for VAR on the sender side. */
766 static tree
767 build_sender_ref (splay_tree_key key, omp_context *ctx)
769 tree field = lookup_sfield (key, ctx);
770 return omp_build_component_ref (ctx->sender_decl, field);
773 static tree
774 build_sender_ref (tree var, omp_context *ctx)
776 return build_sender_ref ((splay_tree_key) var, ctx);
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.
   MASK is a bit set controlling where and how the field is installed:
     bit 0 (1)  - install into record_type / field_map.
     bit 1 (2)  - install into srecord_type / sfield_map.
     bit 2 (4)  - VAR is an array; field gets pointer-to-pointer type.
     bit 3 (8)  - key the maps by &DECL_UID (var) rather than VAR itself.
     bit 4 (16) - key by &DECL_NAME (var) and use the language's
		  omp_array_data type for the field.
     bit 5 (32) - together with bits 0-1 suppresses the by-reference
		  unwrapping below.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  /* A field for KEY must not already exist in the selected map(s).  */
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & (32 | 3)) == 1
	   && omp_privatize_by_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  /* srecord_type is created on demand; seed it with copies of all
	     fields already present in record_type.  */
	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
887 static tree
888 install_var_local (tree var, omp_context *ctx)
890 tree new_var = omp_copy_decl_1 (var, ctx);
891 insert_decl_map (&ctx->cb, var, new_var);
892 return new_var;
895 /* Adjust the replacement for DECL in CTX for the new context. This means
896 copying the DECL_VALUE_EXPR, and fixing up the type. */
898 static void
899 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
901 tree new_decl, size;
903 new_decl = lookup_decl (decl, ctx);
905 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
907 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
908 && DECL_HAS_VALUE_EXPR_P (decl))
910 tree ve = DECL_VALUE_EXPR (decl);
911 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
912 SET_DECL_VALUE_EXPR (new_decl, ve);
913 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
916 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
918 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
919 if (size == error_mark_node)
920 size = TYPE_SIZE (TREE_TYPE (new_decl));
921 DECL_SIZE (new_decl) = size;
923 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
924 if (size == error_mark_node)
925 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
926 DECL_SIZE_UNIT (new_decl) = size;
930 /* The callback for remap_decl. Search all containing contexts for a
931 mapping of the variable; this avoids having to duplicate the splay
932 tree ahead of time. We know a mapping doesn't already exist in the
933 given context. Create new mappings to implement default semantics. */
935 static tree
936 omp_copy_decl (tree var, copy_body_data *cb)
938 omp_context *ctx = (omp_context *) cb;
939 tree new_var;
941 if (TREE_CODE (var) == LABEL_DECL)
943 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
944 return var;
945 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
946 DECL_CONTEXT (new_var) = current_function_decl;
947 insert_decl_map (&ctx->cb, var, new_var);
948 return new_var;
951 while (!is_taskreg_ctx (ctx))
953 ctx = ctx->outer;
954 if (ctx == NULL)
955 return var;
956 new_var = maybe_lookup_decl (var, ctx);
957 if (new_var)
958 return new_var;
961 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
962 return var;
964 return error_mark_node;
967 /* Create a new context, with OUTER_CTX being the surrounding context. */
969 static omp_context *
970 new_omp_context (gimple *stmt, omp_context *outer_ctx)
972 omp_context *ctx = XCNEW (omp_context);
974 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
975 (splay_tree_value) ctx);
976 ctx->stmt = stmt;
978 if (outer_ctx)
980 ctx->outer = outer_ctx;
981 ctx->cb = outer_ctx->cb;
982 ctx->cb.block = NULL;
983 ctx->depth = outer_ctx->depth + 1;
985 else
987 ctx->cb.src_fn = current_function_decl;
988 ctx->cb.dst_fn = current_function_decl;
989 ctx->cb.src_node = cgraph_node::get (current_function_decl);
990 gcc_checking_assert (ctx->cb.src_node);
991 ctx->cb.dst_node = ctx->cb.src_node;
992 ctx->cb.src_cfun = cfun;
993 ctx->cb.copy_decl = omp_copy_decl;
994 ctx->cb.eh_lp_nr = 0;
995 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
996 ctx->cb.adjust_array_error_bounds = true;
997 ctx->cb.dont_remap_vla_if_no_change = true;
998 ctx->depth = 1;
1001 ctx->cb.decl_map = new hash_map<tree, tree>;
1003 return ctx;
1006 static gimple_seq maybe_catch_exception (gimple_seq);
1008 /* Finalize task copyfn. */
1010 static void
1011 finalize_task_copyfn (gomp_task *task_stmt)
1013 struct function *child_cfun;
1014 tree child_fn;
1015 gimple_seq seq = NULL, new_seq;
1016 gbind *bind;
1018 child_fn = gimple_omp_task_copy_fn (task_stmt);
1019 if (child_fn == NULL_TREE)
1020 return;
1022 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
1023 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
1025 push_cfun (child_cfun);
1026 bind = gimplify_body (child_fn, false);
1027 gimple_seq_add_stmt (&seq, bind);
1028 new_seq = maybe_catch_exception (seq);
1029 if (new_seq != seq)
1031 bind = gimple_build_bind (NULL, new_seq, NULL);
1032 seq = NULL;
1033 gimple_seq_add_stmt (&seq, bind);
1035 gimple_set_body (child_fn, seq);
1036 pop_cfun ();
1038 /* Inform the callgraph about the new function. */
1039 cgraph_node *node = cgraph_node::get_create (child_fn);
1040 node->parallelized_function = 1;
1041 cgraph_node::add_new_function (child_fn, false);
1044 /* Destroy a omp_context data structures. Called through the splay tree
1045 value delete callback. */
1047 static void
1048 delete_omp_context (splay_tree_value value)
1050 omp_context *ctx = (omp_context *) value;
1052 delete ctx->cb.decl_map;
1054 if (ctx->field_map)
1055 splay_tree_delete (ctx->field_map);
1056 if (ctx->sfield_map)
1057 splay_tree_delete (ctx->sfield_map);
1059 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1060 it produces corrupt debug information. */
1061 if (ctx->record_type)
1063 tree t;
1064 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1065 DECL_ABSTRACT_ORIGIN (t) = NULL;
1067 if (ctx->srecord_type)
1069 tree t;
1070 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1071 DECL_ABSTRACT_ORIGIN (t) = NULL;
1074 if (ctx->task_reduction_map)
1076 ctx->task_reductions.release ();
1077 delete ctx->task_reduction_map;
1080 delete ctx->lastprivate_conditional_map;
1081 delete ctx->allocate_map;
1083 XDELETE (ctx);
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      /* At least one field needs remapping: build a fresh RECORD_TYPE
	 carrying the same name, with every field copied and its type,
	 sizes and offset remapped into the child function.  */
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  /* Fields are prepended, so the list is reversed below.  */
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.

   This runs in three passes over the clause chain:
     1. record 'allocate' clause allocators/alignments in ctx->allocate_map;
     2. install sender/receiver record fields and context-local copies for
	each data-sharing clause (the integer mask passed to
	install_var_field selects which record(s)/field flavor to create —
	see install_var_field for the meaning of the bits);
     3. fix up the remapped decls now that all fields exist, and finally
	scan any reduction/lastprivate/linear helper GIMPLE sequences.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  /* Pass 1: collect 'allocate' clauses that use a non-default allocator
     or an explicit alignment.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
	&& (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    /* omp_default_mem_alloc is 1 */
	    || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
	    || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
      {
	/* The allocate clauses that appear on a target construct or on
	   constructs in a target region must specify an allocator expression
	   unless a requires directive with the dynamic_allocators clause
	   is present in the same compilation unit.  */
	if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS) == 0)
	    && omp_maybe_offloaded_ctx (ctx))
	  error_at (OMP_CLAUSE_LOCATION (c), "%<allocate%> clause must"
		    " specify an allocator here");
	if (ctx->allocate_map == NULL)
	  ctx->allocate_map = new hash_map<tree, tree>;
	/* Map value is the allocator (0 if unspecified), wrapped in a
	   TREE_LIST together with the alignment when one was given.  */
	tree val = integer_zero_node;
	if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
	  val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
	if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
	  val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
	ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
      }

  /* Pass 2: install record fields and context-local decls.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* A shared variable is never privately allocated; drop any
	     'allocate' entry recorded for it in pass 1.  */
	  if (ctx->allocate_map && ctx->allocate_map->get (decl))
	    ctx->allocate_map->remove (decl);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      /* Called for its side effect of marking the decl; the field
		 itself is installed by the second scan (pass 3).  */
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_privatize_by_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  /* Collect 'reduction' clauses on OpenACC compute construct.  */
	  if (is_gimple_omp_oacc (ctx->stmt)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No 'reduction' clauses on OpenACC 'kernels'.  */
	      gcc_checking_assert (!is_oacc_kernels (ctx));
	      /* Likewise, on OpenACC 'kernels' decomposed parts.  */
	      gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	      ctx->local_reduction_clauses
		= tree_cons (NULL, c, ctx->local_reduction_clauses);
	    }
	  /* FALLTHRU */

	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map
	      && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		   && (OMP_CLAUSE_REDUCTION_INSCAN (c)
		       || OMP_CLAUSE_REDUCTION_TASK (c)))
		  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		  || is_task_ctx (ctx)))
	    {
	      /* For now.  */
	      if (ctx->allocate_map->get (decl))
		ctx->allocate_map->remove (decl);
	    }
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array-section reduction: peel the address arithmetic back
		 to the underlying base decl T.  */
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (INDIRECT_REF_P (t)
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (is_omp_target (ctx->stmt))
		{
		  if (is_variable_sized (t))
		    {
		      gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
		      t = DECL_VALUE_EXPR (t);
		      gcc_assert (INDIRECT_REF_P (t));
		      t = TREE_OPERAND (t, 0);
		      gcc_assert (DECL_P (t));
		    }
		  /* On target, record the private copy keyed by the
		     address of DECL_CONTEXT so that both the inner and
		     (when distinct) outer views of T find it.  */
		  tree at = t;
		  if (ctx->outer)
		    scan_omp_op (&at, ctx->outer);
		  tree nt = omp_copy_decl_1 (at, ctx->outer);
		  splay_tree_insert (ctx->field_map,
				     (splay_tree_key) &DECL_CONTEXT (t),
				     (splay_tree_value) nt);
		  if (at != t)
		    splay_tree_insert (ctx->field_map,
				       (splay_tree_key) &DECL_CONTEXT (at),
				       (splay_tree_value) nt);
		  break;
		}
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_omp_target (ctx->stmt))
	    {
	      /* Same DECL_CONTEXT-keyed mapping as the MEM_REF target case
		 above, but for a plain decl.  */
	      tree at = decl;
	      if (ctx->outer)
		scan_omp_op (&at, ctx->outer);
	      tree nt = omp_copy_decl_1 (at, ctx->outer);
	      splay_tree_insert (ctx->field_map,
				 (splay_tree_key) &DECL_CONTEXT (decl),
				 (splay_tree_value) nt);
	      if (at != decl)
		splay_tree_insert (ctx->field_map,
				   (splay_tree_key) &DECL_CONTEXT (at),
				   (splay_tree_value) nt);
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR
		      && lang_hooks.decls.omp_array_data (decl, true)))
		{
		  by_ref = !omp_privatize_by_reference (decl);
		  install_var_field (decl, by_ref, 3, ctx);
		}
	      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
		{
		  if (INDIRECT_REF_P (decl))
		    decl = TREE_OPERAND (decl, 0);
		  install_var_field (decl, true, 3, ctx);
		}
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		{
		  if (ctx->allocate_map
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		    {
		      /* For now.  */
		      if (ctx->allocate_map->get (decl))
			ctx->allocate_map->remove (decl);
		    }
		  install_var_field (decl, false, 1, ctx);
		}
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_privatize_by_reference (decl)))
		{
		  if (ctx->allocate_map
		      && ctx->allocate_map->get (decl))
		    install_var_field (decl, by_ref, 32 | 1, ctx);
		  else
		    install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  /* For descr arrays on target: firstprivatize data + attach ptr.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	      && is_gimple_omp_offloaded (ctx->stmt)
	      && !is_gimple_omp_oacc (ctx->stmt)
	      && lang_hooks.decls.omp_array_data (decl, true))
	    {
	      install_var_field (decl, false, 16 | 3, ctx);
	      install_var_field (decl, true, 8 | 3, ctx);
	    }
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);

	  /* Fortran array descriptors.  */
	  if (lang_hooks.decls.omp_array_data (decl, true))
	    install_var_field (decl, false, 19, ctx);
	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		    && !omp_privatize_by_reference (decl)
		    && !omp_is_allocatable_or_ptr (decl))
		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Variable-sized decl: its DECL_VALUE_EXPR is *ptr; also
		 install the underlying pointer decl.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (INDIRECT_REF_P (decl2));
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);
	  while (INDIRECT_REF_P (decl)
		 || TREE_CODE (decl) == ARRAY_REF)
	    decl = TREE_OPERAND (decl, 0);
	  goto do_private;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_FILTER:
	  /* These carry a single expression operand that must be scanned
	     in the enclosing context where it is evaluated.  */
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* If requested, make 'decl' addressable.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c))
	    {
	      gcc_checking_assert (DECL_P (decl));

	      bool decl_addressable = TREE_ADDRESSABLE (decl);
	      if (!decl_addressable)
		{
		  if (!make_addressable_vars)
		    make_addressable_vars = BITMAP_ALLOC (NULL);
		  bitmap_set_bit (make_addressable_vars, DECL_UID (decl));
		  TREE_ADDRESSABLE (decl) = 1;
		}

	      if (dump_enabled_p ())
		{
		  location_t loc = OMP_CLAUSE_LOCATION (c);
		  const dump_user_location_t d_u_loc
		    = dump_user_location_t::from_location_t (loc);
		  /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
#if __GNUC__ >= 10
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wformat"
#endif
		  if (!decl_addressable)
		    dump_printf_loc (MSG_NOTE, d_u_loc,
				     "variable %<%T%>"
				     " made addressable\n",
				     decl);
		  else
		    dump_printf_loc (MSG_NOTE, d_u_loc,
				     "variable %<%T%>"
				     " already made addressable\n",
				     decl);
#if __GNUC__ >= 10
# pragma GCC diagnostic pop
#endif
		}

	      /* Done.  */
	      OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c) = 0;
	    }
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt))
	    {
	      /* If this is an offloaded region, an attach operation should
		 only exist when the pointer variable is mapped in a prior
		 clause.
		 If we had an error, we may not have attempted to sort clauses
		 properly, so avoid the test.  */
	      if (is_gimple_omp_offloaded (ctx->stmt)
		  && !seen_error ())
		gcc_assert
		  (maybe_lookup_decl (decl, ctx)
		   || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
		       && lookup_attribute ("omp declare target",
					    DECL_ATTRIBUTES (decl))));

	      /* By itself, attach/detach is generated as part of pointer
		 variable mapping and should not create new variables in the
		 offloaded region, however sender refs for it must be created
		 for its address to be passed to the runtime.  */
	      tree field
		= build_decl (OMP_CLAUSE_LOCATION (c),
			      FIELD_DECL, NULL_TREE, ptr_type_node);
	      SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
	      insert_field_into_struct (ctx->record_type, field);
	      /* To not clash with a map of the pointer variable itself,
		 attach/detach maps have their field looked up by the *clause*
		 tree expression, not the decl.  */
	      gcc_assert (!splay_tree_lookup (ctx->field_map,
					      (splay_tree_key) c));
	      splay_tree_insert (ctx->field_map, (splay_tree_key) c,
				 (splay_tree_value) field);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (INDIRECT_REF_P (decl)
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			    == REFERENCE_TYPE)
			   || (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			       == POINTER_TYPE)))))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (INDIRECT_REF_P (decl2));
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (INDIRECT_REF_P (decl2));
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !(is_gimple_omp_oacc (ctx->stmt)
			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      /* Non-decl map operand (e.g. an array section).  If the next
		 clause is the matching zero-length GOMP_MAP_POINTER for its
		 base, mark both as a zero-bias array section; otherwise
		 create an anonymous pointer field keyed by the expression.  */
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_BIND:
	  ctx->loop_p = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_ALLOCATE:
	  /* No decls to install for these in this pass.  */
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}
    }

  /* Pass 3: now that all fields exist, fix up the remapped decls and
     note which clauses carry helper GIMPLE sequences to scan.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
	    {
	      while (INDIRECT_REF_P (decl)
		     || TREE_CODE (decl) == ARRAY_REF)
		decl = TREE_OPERAND (decl, 0);
	    }

	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (INDIRECT_REF_P (decl2));
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
	       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt)
	      && !is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  /* Incomplete array: the remapped decl needs its type
		     remapped too, which fixup_remapped_decl can't do.  */
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (INDIRECT_REF_P (decl2));
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_ALLOCATE:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    /* On a target construct the init/merge sequences run in the
	       enclosing host context.  */
	    omp_context *rctx = ctx;
	    if (is_omp_target (ctx->stmt))
	      rctx = ctx->outer;
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
2009 /* Create a new name for omp child function. Returns an identifier. */
2011 static tree
2012 create_omp_child_function_name (bool task_copy)
2014 return clone_function_name_numbered (current_function_decl,
2015 task_copy ? "_omp_cpyfn" : "_omp_fn");
2018 /* Return true if CTX may belong to offloaded code: either if current function
2019 is offloaded, or any enclosing context corresponds to a target region. */
2021 static bool
2022 omp_maybe_offloaded_ctx (omp_context *ctx)
2024 if (cgraph_node::get (current_function_decl)->offloadable)
2025 return true;
2026 for (; ctx; ctx = ctx->outer)
2027 if (is_gimple_omp_offloaded (ctx->stmt))
2028 return true;
2029 return false;
2032 /* Build a decl for the omp child function. It'll not contain a body
2033 yet, just the bare decl. */
2035 static void
2036 create_omp_child_function (omp_context *ctx, bool task_copy)
2038 tree decl, type, name, t;
2040 name = create_omp_child_function_name (task_copy);
/* A task copy function takes (dst, src) data pointers; an outlined
   region body takes a single pointer to the .omp_data_s block.  */
2041 if (task_copy)
2042 type = build_function_type_list (void_type_node, ptr_type_node,
2043 ptr_type_node, NULL_TREE);
2044 else
2045 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2047 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
2049 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
2050 || !task_copy);
2051 if (!task_copy)
2052 ctx->cb.dst_fn = decl;
2053 else
2054 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
/* Mark the child function artificial, non-public and not inlinable;
   it inherits the parent's attributes below.  */
2056 TREE_STATIC (decl) = 1;
2057 TREE_USED (decl) = 1;
2058 DECL_ARTIFICIAL (decl) = 1;
2059 DECL_IGNORED_P (decl) = 0;
2060 TREE_PUBLIC (decl) = 0;
2061 DECL_UNINLINABLE (decl) = 1;
2062 DECL_EXTERNAL (decl) = 0;
2063 DECL_CONTEXT (decl) = NULL_TREE;
2064 DECL_INITIAL (decl) = make_node (BLOCK);
2065 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
2066 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
2067 /* Remove omp declare simd attribute from the new attributes. */
2068 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
/* A is advanced to the chain node following the last "omp declare simd"
   attribute; everything from there on can be shared unmodified.  */
2070 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
2071 a = a2;
2072 a = TREE_CHAIN (a);
/* Copy the prefix of the attribute chain up to A, dropping the
   "omp declare simd" nodes and leaving the tail shared.  */
2073 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
2074 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
2075 *p = TREE_CHAIN (*p);
2076 else
2078 tree chain = TREE_CHAIN (*p);
2079 *p = copy_node (*p);
2080 p = &TREE_CHAIN (*p);
2081 *p = chain;
/* Propagate optimization/target/version settings from the parent.  */
2084 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
2085 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
2086 DECL_FUNCTION_SPECIFIC_TARGET (decl)
2087 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
2088 DECL_FUNCTION_VERSIONED (decl)
2089 = DECL_FUNCTION_VERSIONED (current_function_decl);
2091 if (omp_maybe_offloaded_ctx (ctx))
2093 cgraph_node::get_create (decl)->offloadable = 1;
2094 if (ENABLE_OFFLOADING)
2095 g->have_offload = true;
2098 if (cgraph_node::get_create (decl)->offloadable)
2100 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
2101 ? "omp target entrypoint"
2102 : "omp declare target");
2103 if (lookup_attribute ("omp declare target",
2104 DECL_ATTRIBUTES (current_function_decl)))
2106 if (is_gimple_omp_offloaded (ctx->stmt))
2107 DECL_ATTRIBUTES (decl)
2108 = remove_attribute ("omp declare target",
2109 copy_list (DECL_ATTRIBUTES (decl)));
2110 else
2111 target_attr = NULL;
/* Target entrypoints must not be cloned, so the host and offload
   copies stay in sync.  */
2113 if (target_attr
2114 && is_gimple_omp_offloaded (ctx->stmt)
2115 && lookup_attribute ("noclone", DECL_ATTRIBUTES (decl)) == NULL_TREE)
2116 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("noclone"),
2117 NULL_TREE, DECL_ATTRIBUTES (decl));
2118 if (target_attr)
2119 DECL_ATTRIBUTES (decl)
2120 = tree_cons (get_identifier (target_attr),
2121 NULL_TREE, DECL_ATTRIBUTES (decl));
/* Child functions return void.  */
2124 t = build_decl (DECL_SOURCE_LOCATION (decl),
2125 RESULT_DECL, NULL_TREE, void_type_node);
2126 DECL_ARTIFICIAL (t) = 1;
2127 DECL_IGNORED_P (t) = 1;
2128 DECL_CONTEXT (t) = decl;
2129 DECL_RESULT (decl) = t;
/* The incoming data block parameter, .omp_data_i.  */
2131 tree data_name = get_identifier (".omp_data_i");
2132 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
2133 ptr_type_node);
2134 DECL_ARTIFICIAL (t) = 1;
2135 DECL_NAMELESS (t) = 1;
2136 DECL_ARG_TYPE (t) = ptr_type_node;
2137 DECL_CONTEXT (t) = current_function_decl;
2138 TREE_USED (t) = 1;
2139 TREE_READONLY (t) = 1;
2140 DECL_ARGUMENTS (decl) = t;
2141 if (!task_copy)
2142 ctx->receiver_decl = t;
2143 else
/* Task copy functions get a second, destination parameter,
   .omp_data_o, chained in front of .omp_data_i.  */
2145 t = build_decl (DECL_SOURCE_LOCATION (decl),
2146 PARM_DECL, get_identifier (".omp_data_o"),
2147 ptr_type_node);
2148 DECL_ARTIFICIAL (t) = 1;
2149 DECL_NAMELESS (t) = 1;
2150 DECL_ARG_TYPE (t) = ptr_type_node;
2151 DECL_CONTEXT (t) = current_function_decl;
2152 TREE_USED (t) = 1;
2153 TREE_ADDRESSABLE (t) = 1;
2154 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2155 DECL_ARGUMENTS (decl) = t;
2158 /* Allocate memory for the function structure. The call to
2159 allocate_struct_function clobbers CFUN, so we need to restore
2160 it afterward. */
2161 push_struct_function (decl);
2162 cfun->function_end_locus = gimple_location (ctx->stmt);
2163 init_tree_ssa (cfun);
2164 pop_cfun ();
2167 /* Callback for walk_gimple_seq. Check if combined parallel
2168 contains gimple_omp_for_combined_into_p OMP_FOR. */
2170 tree
2171 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2172 bool *handled_ops_p,
2173 struct walk_stmt_info *wi)
2175 gimple *stmt = gsi_stmt (*gsi_p);
2177 *handled_ops_p = true;
2178 switch (gimple_code (stmt))
/* Descend into container statements (binds etc.).  */
2180 WALK_SUBSTMTS;
2182 case GIMPLE_OMP_FOR:
/* WI->INFO initially points at the gf_mask loop kind to match.  */
2183 if (gimple_omp_for_combined_into_p (stmt)
2184 && gimple_omp_for_kind (stmt)
2185 == *(const enum gf_mask *) (wi->info))
/* Found it: stash the stmt in WI->INFO and return non-NULL to
   terminate the walk.  */
2187 wi->info = stmt;
2188 return integer_zero_node;
2190 break;
2191 default:
2192 break;
2194 return NULL;
2197 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2199 static void
2200 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2201 omp_context *outer_ctx)
2203 struct walk_stmt_info wi;
/* Look for the inner GIMPLE_OMP_FOR of kind MSK that this combined
   construct was split from; omp_find_combined_for replaces WI.INFO
   with the stmt when found.  */
2205 memset (&wi, 0, sizeof (wi));
2206 wi.val_only = true;
2207 wi.info = (void *) &msk;
2208 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2209 if (wi.info != (void *) &msk)
2211 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2212 struct omp_for_data fd;
2213 omp_extract_for_data (for_stmt, &fd, NULL);
2214 /* We need two temporaries with fd.loop.v type (istart/iend)
2215 and then (fd.collapse - 1) temporaries with the same
2216 type for count2 ... countN-1 vars if not constant. */
2217 size_t count = 2, i;
2218 tree type = fd.iter_type;
2219 if (fd.collapse > 1
2220 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2222 count += fd.collapse - 1;
2223 /* If there are lastprivate clauses on the inner
2224 GIMPLE_OMP_FOR, add one more temporaries for the total number
2225 of iterations (product of count1 ... countN-1). */
2226 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2227 OMP_CLAUSE_LASTPRIVATE)
2228 || (msk == GF_OMP_FOR_KIND_FOR
2229 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2230 OMP_CLAUSE_LASTPRIVATE)))
2232 tree temp = create_tmp_var (type);
2233 tree c = build_omp_clause (UNKNOWN_LOCATION,
2234 OMP_CLAUSE__LOOPTEMP_);
2235 insert_decl_map (&outer_ctx->cb, temp, temp);
2236 OMP_CLAUSE_DECL (c) = temp;
2237 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2238 gimple_omp_taskreg_set_clauses (stmt, c);
/* For a signed adjacent non-rectangular loop pair, three extra
   temporaries of the first index's type are needed.  */
2240 if (fd.non_rect
2241 && fd.last_nonrect == fd.first_nonrect + 1)
2242 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2243 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2245 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2246 tree type2 = TREE_TYPE (v);
2247 count++;
2248 for (i = 0; i < 3; i++)
2250 tree temp = create_tmp_var (type2);
2251 tree c = build_omp_clause (UNKNOWN_LOCATION,
2252 OMP_CLAUSE__LOOPTEMP_);
2253 insert_decl_map (&outer_ctx->cb, temp, temp);
2254 OMP_CLAUSE_DECL (c) = temp;
2255 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2256 gimple_omp_taskreg_set_clauses (stmt, c);
/* Prepend COUNT _looptemp_ clauses of the iteration type.  */
2260 for (i = 0; i < count; i++)
2262 tree temp = create_tmp_var (type);
2263 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2264 insert_decl_map (&outer_ctx->cb, temp, temp);
2265 OMP_CLAUSE_DECL (c) = temp;
2266 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2267 gimple_omp_taskreg_set_clauses (stmt, c);
/* Taskloops with reductions additionally need a _reductemp_.  */
2270 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2271 && omp_find_clause (gimple_omp_task_clauses (stmt),
2272 OMP_CLAUSE_REDUCTION))
2274 tree type = build_pointer_type (pointer_sized_int_node);
2275 tree temp = create_tmp_var (type);
2276 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2277 insert_decl_map (&outer_ctx->cb, temp, temp);
2278 OMP_CLAUSE_DECL (c) = temp;
2279 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2280 gimple_omp_task_set_clauses (stmt, c);
2284 /* Scan an OpenMP parallel directive. */
2286 static void
2287 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2289 omp_context *ctx;
2290 tree name;
2291 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2293 /* Ignore parallel directives with empty bodies, unless there
2294 are copyin clauses. */
2295 if (optimize > 0
2296 && empty_body_p (gimple_omp_body (stmt))
2297 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2298 OMP_CLAUSE_COPYIN) == NULL)
2300 gsi_replace (gsi, gimple_build_nop (), false);
2301 return;
/* Combined parallel-for needs _looptemp_ clauses for the split loop.  */
2304 if (gimple_omp_parallel_combined_p (stmt))
2305 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
/* If any reduction uses the task modifier, prepend one _reductemp_
   clause for GOMP_parallel_reductions.  */
2306 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2307 OMP_CLAUSE_REDUCTION);
2308 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2309 if (OMP_CLAUSE_REDUCTION_TASK (c))
2311 tree type = build_pointer_type (pointer_sized_int_node);
2312 tree temp = create_tmp_var (type);
2313 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2314 if (outer_ctx)
2315 insert_decl_map (&outer_ctx->cb, temp, temp);
2316 OMP_CLAUSE_DECL (c) = temp;
2317 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2318 gimple_omp_parallel_set_clauses (stmt, c);
2319 break;
2321 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2322 break;
/* Create the context, the .omp_data_s record type and the child
   function; the record is laid out later in finish_taskreg_scan.  */
2324 ctx = new_omp_context (stmt, outer_ctx);
2325 taskreg_contexts.safe_push (ctx);
2326 if (taskreg_nesting_level > 1)
2327 ctx->is_nested = true;
2328 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2329 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2330 name = create_tmp_var_name (".omp_data_s");
2331 name = build_decl (gimple_location (stmt),
2332 TYPE_DECL, name, ctx->record_type);
2333 DECL_ARTIFICIAL (name) = 1;
2334 DECL_NAMELESS (name) = 1;
2335 TYPE_NAME (ctx->record_type) = name;
2336 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2337 create_omp_child_function (ctx, false);
2338 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2340 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2341 scan_omp (gimple_omp_body_ptr (stmt), ctx);
/* Nothing to pass: drop the record and receiver.  */
2343 if (TYPE_FIELDS (ctx->record_type) == NULL)
2344 ctx->record_type = ctx->receiver_decl = NULL;
2347 /* Scan an OpenMP task directive. */
2349 static void
2350 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2352 omp_context *ctx;
2353 tree name, t;
2354 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2356 /* Ignore task directives with empty bodies, unless they have depend
2357 clause. */
2358 if (optimize > 0
2359 && gimple_omp_body (stmt)
2360 && empty_body_p (gimple_omp_body (stmt))
2361 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND)
2363 gsi_replace (gsi, gimple_build_nop (), false);
2364 return;
/* Taskloops need _looptemp_/_reductemp_ clauses for the split loop.  */
2367 if (gimple_omp_task_taskloop_p (stmt))
2368 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2370 ctx = new_omp_context (stmt, outer_ctx);
/* A taskwait with depend clauses has no body or child function;
   only its clauses need scanning.  */
2372 if (gimple_omp_task_taskwait_p (stmt))
2374 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2375 return;
2378 taskreg_contexts.safe_push (ctx);
2379 if (taskreg_nesting_level > 1)
2380 ctx->is_nested = true;
2381 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2382 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2383 name = create_tmp_var_name (".omp_data_s");
2384 name = build_decl (gimple_location (stmt),
2385 TYPE_DECL, name, ctx->record_type);
2386 DECL_ARTIFICIAL (name) = 1;
2387 DECL_NAMELESS (name) = 1;
2388 TYPE_NAME (ctx->record_type) = name;
2389 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2390 create_omp_child_function (ctx, false);
2391 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2393 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
/* If scan_sharing_clauses created a sender record (.omp_data_a),
   also build the task copy function.  */
2395 if (ctx->srecord_type)
2397 name = create_tmp_var_name (".omp_data_a");
2398 name = build_decl (gimple_location (stmt),
2399 TYPE_DECL, name, ctx->srecord_type);
2400 DECL_ARTIFICIAL (name) = 1;
2401 DECL_NAMELESS (name) = 1;
2402 TYPE_NAME (ctx->srecord_type) = name;
2403 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2404 create_omp_child_function (ctx, true);
2407 scan_omp (gimple_omp_body_ptr (stmt), ctx);
/* Empty record: no data to pass, so size 0 and alignment 1.  */
2409 if (TYPE_FIELDS (ctx->record_type) == NULL)
2411 ctx->record_type = ctx->receiver_decl = NULL;
2412 t = build_int_cst (long_integer_type_node, 0);
2413 gimple_omp_task_set_arg_size (stmt, t);
2414 t = build_int_cst (long_integer_type_node, 1);
2415 gimple_omp_task_set_arg_align (stmt, t);
2419 /* Helper function for finish_taskreg_scan, called through walk_tree.
2420 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2421 tree, replace it in the expression. */
2423 static tree
2424 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2426 if (VAR_P (*tp))
2428 omp_context *ctx = (omp_context *) data;
2429 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2430 if (t != *tp)
2432 if (DECL_HAS_VALUE_EXPR_P (t))
2433 t = unshare_expr (DECL_VALUE_EXPR (t));
2434 *tp = t;
2436 *walk_subtrees = 0;
2438 else if (IS_TYPE_OR_DECL_P (*tp))
2439 *walk_subtrees = 0;
2440 return NULL_TREE;
2443 /* If any decls have been made addressable during scan_omp,
2444 adjust their fields if needed, and layout record types
2445 of parallel/task constructs. */
2447 static void
2448 finish_taskreg_scan (omp_context *ctx)
2450 if (ctx->record_type == NULL_TREE)
2451 return;
2453 /* If any make_addressable_vars were needed, verify all
2454 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2455 statements if use_pointer_for_field hasn't changed
2456 because of that. If it did, update field types now. */
2457 if (make_addressable_vars)
2459 tree c;
2461 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2462 c; c = OMP_CLAUSE_CHAIN (c))
2463 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2464 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2466 tree decl = OMP_CLAUSE_DECL (c);
2468 /* Global variables don't need to be copied,
2469 the receiver side will use them directly. */
2470 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2471 continue;
2472 if (!bitmap_bit_p (make_addressable_vars, DECL_UID (decl))
2473 || !use_pointer_for_field (decl, ctx))
2474 continue;
2475 tree field = lookup_field (decl, ctx);
/* Already a pointer-to-decl-type field: nothing to fix.  */
2476 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2477 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2478 continue;
/* Retype the by-value field to a pointer and reset alignment.  */
2479 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2480 TREE_THIS_VOLATILE (field) = 0;
2481 DECL_USER_ALIGN (field) = 0;
2482 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2483 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2484 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2485 if (ctx->srecord_type)
2487 tree sfield = lookup_sfield (decl, ctx);
2488 TREE_TYPE (sfield) = TREE_TYPE (field);
2489 TREE_THIS_VOLATILE (sfield) = 0;
2490 DECL_USER_ALIGN (sfield) = 0;
2491 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2492 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2493 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2498 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2500 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2501 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2502 if (c)
2504 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2505 expects to find it at the start of data. */
2506 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2507 tree *p = &TYPE_FIELDS (ctx->record_type);
2508 while (*p)
2509 if (*p == f)
2511 *p = DECL_CHAIN (*p);
2512 break;
2514 else
2515 p = &DECL_CHAIN (*p);
2516 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2517 TYPE_FIELDS (ctx->record_type) = f;
2519 layout_type (ctx->record_type);
2520 fixup_child_record_type (ctx);
2522 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2524 layout_type (ctx->record_type);
2525 fixup_child_record_type (ctx);
2527 else
/* GIMPLE_OMP_TASK.  */
2529 location_t loc = gimple_location (ctx->stmt);
2530 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2531 tree detach_clause
2532 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2533 OMP_CLAUSE_DETACH);
2534 /* Move VLA fields to the end. */
2535 p = &TYPE_FIELDS (ctx->record_type);
2536 while (*p)
2537 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2538 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
/* Unlink the variable-sized field and append it to VLA_FIELDS.  */
2540 *q = *p;
2541 *p = TREE_CHAIN (*p);
2542 TREE_CHAIN (*q) = NULL_TREE;
2543 q = &TREE_CHAIN (*q);
2545 else
2546 p = &DECL_CHAIN (*p);
2547 *p = vla_fields;
2548 if (gimple_omp_task_taskloop_p (ctx->stmt))
2550 /* Move fields corresponding to first and second _looptemp_
2551 clause first. There are filled by GOMP_taskloop
2552 and thus need to be in specific positions. */
2553 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2554 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2555 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2556 OMP_CLAUSE__LOOPTEMP_);
2557 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2558 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2559 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2560 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
/* Unlink F1/F2/F3 wherever they are, then relink them at the
   head in order F1, F2[, F3].  */
2561 p = &TYPE_FIELDS (ctx->record_type);
2562 while (*p)
2563 if (*p == f1 || *p == f2 || *p == f3)
2564 *p = DECL_CHAIN (*p);
2565 else
2566 p = &DECL_CHAIN (*p);
2567 DECL_CHAIN (f1) = f2;
2568 if (c3)
2570 DECL_CHAIN (f2) = f3;
2571 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2573 else
2574 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2575 TYPE_FIELDS (ctx->record_type) = f1;
/* Mirror the same reordering on the sender record.  */
2576 if (ctx->srecord_type)
2578 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2579 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2580 if (c3)
2581 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2582 p = &TYPE_FIELDS (ctx->srecord_type);
2583 while (*p)
2584 if (*p == f1 || *p == f2 || *p == f3)
2585 *p = DECL_CHAIN (*p);
2586 else
2587 p = &DECL_CHAIN (*p);
2588 DECL_CHAIN (f1) = f2;
2589 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2590 if (c3)
2592 DECL_CHAIN (f2) = f3;
2593 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2595 else
2596 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2597 TYPE_FIELDS (ctx->srecord_type) = f1;
2600 if (detach_clause)
2602 tree c, field;
2604 /* Look for a firstprivate clause with the detach event handle. */
2605 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2606 c; c = OMP_CLAUSE_CHAIN (c))
2608 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2609 continue;
2610 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2611 == OMP_CLAUSE_DECL (detach_clause))
2612 break;
2615 gcc_assert (c);
2616 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2618 /* Move field corresponding to the detach clause first.
2619 This is filled by GOMP_task and needs to be in a
2620 specific position. */
2621 p = &TYPE_FIELDS (ctx->record_type);
2622 while (*p)
2623 if (*p == field)
2624 *p = DECL_CHAIN (*p);
2625 else
2626 p = &DECL_CHAIN (*p);
2627 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2628 TYPE_FIELDS (ctx->record_type) = field;
2629 if (ctx->srecord_type)
2631 field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2632 p = &TYPE_FIELDS (ctx->srecord_type);
2633 while (*p)
2634 if (*p == field)
2635 *p = DECL_CHAIN (*p);
2636 else
2637 p = &DECL_CHAIN (*p);
2638 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2639 TYPE_FIELDS (ctx->srecord_type) = field;
2642 layout_type (ctx->record_type);
2643 fixup_child_record_type (ctx);
2644 if (ctx->srecord_type)
2645 layout_type (ctx->srecord_type);
/* Record the (possibly runtime-computed) size and alignment of the
   task data block on the stmt for GOMP_task.  */
2646 tree t = fold_convert_loc (loc, long_integer_type_node,
2647 TYPE_SIZE_UNIT (ctx->record_type));
2648 if (TREE_CODE (t) != INTEGER_CST)
2650 t = unshare_expr (t);
2651 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2653 gimple_omp_task_set_arg_size (ctx->stmt, t);
2654 t = build_int_cst (long_integer_type_node,
2655 TYPE_ALIGN_UNIT (ctx->record_type));
2656 gimple_omp_task_set_arg_align (ctx->stmt, t);
2660 /* Find the enclosing offload context. */
2662 static omp_context *
2663 enclosing_target_ctx (omp_context *ctx)
2665 for (; ctx; ctx = ctx->outer)
2666 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2667 break;
2669 return ctx;
2672 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2673 construct.
2674 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2676 static bool
2677 ctx_in_oacc_kernels_region (omp_context *ctx)
2679 for (;ctx != NULL; ctx = ctx->outer)
2681 gimple *stmt = ctx->stmt;
2682 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2683 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2684 return true;
2687 return false;
2690 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2691 (This doesn't include OpenACC 'kernels' decomposed parts.)
2692 Until kernels handling moves to use the same loop indirection
2693 scheme as parallel, we need to do this checking early. */
2695 static unsigned
2696 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2698 bool checking = true;
2699 unsigned outer_mask = 0;
2700 unsigned this_mask = 0;
2701 bool has_seq = false, has_auto = false;
/* Recurse outward first; OUTER_MASK accumulates the gang/worker/vector
   levels already claimed by enclosing loops.  Recursive invocations
   pass a NULL STMT, which suppresses diagnostics below.  */
2703 if (ctx->outer)
2704 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2705 if (!stmt)
2707 checking = false;
2708 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2709 return outer_mask;
2710 stmt = as_a <gomp_for *> (ctx->stmt);
/* Collect this loop's parallelism clauses into THIS_MASK.  */
2713 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2715 switch (OMP_CLAUSE_CODE (c))
2717 case OMP_CLAUSE_GANG:
2718 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2719 break;
2720 case OMP_CLAUSE_WORKER:
2721 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2722 break;
2723 case OMP_CLAUSE_VECTOR:
2724 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2725 break;
2726 case OMP_CLAUSE_SEQ:
2727 has_seq = true;
2728 break;
2729 case OMP_CLAUSE_AUTO:
2730 has_auto = true;
2731 break;
2732 default:
2733 break;
/* Only the outermost (STMT-supplied) invocation diagnoses.  */
2737 if (checking)
2739 if (has_seq && (this_mask || has_auto))
2740 error_at (gimple_location (stmt), "%<seq%> overrides other"
2741 " OpenACC loop specifiers");
2742 else if (has_auto && this_mask)
2743 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2744 " OpenACC loop specifiers");
2746 if (this_mask & outer_mask)
2747 error_at (gimple_location (stmt), "inner loop uses same"
2748 " OpenACC parallelism as containing loop");
2751 return outer_mask | this_mask;
2754 /* Scan a GIMPLE_OMP_FOR. */
2756 static omp_context *
2757 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2759 omp_context *ctx;
2760 size_t i;
2761 tree clauses = gimple_omp_for_clauses (stmt);
2763 ctx = new_omp_context (stmt, outer_ctx);
2765 if (is_gimple_omp_oacc (stmt))
2767 omp_context *tgt = enclosing_target_ctx (outer_ctx);
/* Outside of 'kernels', gang/worker/vector clauses may not carry
   an argument expression; diagnose any that do.  */
2769 if (!(tgt && is_oacc_kernels (tgt)))
2770 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2772 tree c_op0;
2773 switch (OMP_CLAUSE_CODE (c))
2775 case OMP_CLAUSE_GANG:
2776 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2777 break;
2779 case OMP_CLAUSE_WORKER:
2780 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2781 break;
2783 case OMP_CLAUSE_VECTOR:
2784 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2785 break;
2787 default:
2788 continue;
2791 if (c_op0)
2793 /* By construction, this is impossible for OpenACC 'kernels'
2794 decomposed parts. */
2795 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2797 error_at (OMP_CLAUSE_LOCATION (c),
2798 "argument not permitted on %qs clause",
2799 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2800 if (tgt)
2801 inform (gimple_location (tgt->stmt),
2802 "enclosing parent compute construct");
2803 else if (oacc_get_fn_attrib (current_function_decl))
2804 inform (DECL_SOURCE_LOCATION (current_function_decl),
2805 "enclosing routine");
2806 else
2807 gcc_unreachable ();
2811 if (tgt && is_oacc_kernels (tgt))
2812 check_oacc_kernel_gwv (stmt, ctx);
2814 /* Collect all variables named in reductions on this loop. Ensure
2815 that, if this loop has a reduction on some variable v, and there is
2816 a reduction on v somewhere in an outer context, then there is a
2817 reduction on v on all intervening loops as well. */
2818 tree local_reduction_clauses = NULL;
2819 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2821 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2822 local_reduction_clauses
2823 = tree_cons (NULL, c, local_reduction_clauses)
/* Lazily compute the set of reductions active in enclosing contexts.  */
2825 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2826 ctx->outer_reduction_clauses
2827 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2828 ctx->outer->outer_reduction_clauses);
2829 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2830 tree local_iter = local_reduction_clauses;
2831 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2833 tree local_clause = TREE_VALUE (local_iter);
2834 tree local_var = OMP_CLAUSE_DECL (local_clause);
2835 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2836 bool have_outer_reduction = false;
2837 tree ctx_iter = outer_reduction_clauses;
2838 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2840 tree outer_clause = TREE_VALUE (ctx_iter);
2841 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2842 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
/* Same variable reduced with a different operation: warn.  */
2843 if (outer_var == local_var && outer_op != local_op)
2845 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2846 "conflicting reduction operations for %qE",
2847 local_var);
2848 inform (OMP_CLAUSE_LOCATION (outer_clause),
2849 "location of the previous reduction for %qE",
2850 outer_var);
2852 if (outer_var == local_var)
2854 have_outer_reduction = true;
2855 break;
2858 if (have_outer_reduction)
2860 /* There is a reduction on outer_var both on this loop and on
2861 some enclosing loop. Walk up the context tree until such a
2862 loop with a reduction on outer_var is found, and complain
2863 about all intervening loops that do not have such a
2864 reduction. */
2865 struct omp_context *curr_loop = ctx->outer;
2866 bool found = false;
2867 while (curr_loop != NULL)
2869 tree curr_iter = curr_loop->local_reduction_clauses;
2870 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2872 tree curr_clause = TREE_VALUE (curr_iter);
2873 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2874 if (curr_var == local_var)
2876 found = true;
2877 break;
2880 if (!found)
2881 warning_at (gimple_location (curr_loop->stmt), 0,
2882 "nested loop in reduction needs "
2883 "reduction clause for %qE",
2884 local_var);
2885 else
2886 break;
2887 curr_loop = curr_loop->outer;
2891 ctx->local_reduction_clauses = local_reduction_clauses;
2892 ctx->outer_reduction_clauses
2893 = chainon (unshare_expr (ctx->local_reduction_clauses),
2894 ctx->outer_reduction_clauses);
2896 if (tgt && is_oacc_kernels (tgt))
2898 /* Strip out reductions, as they are not handled yet. */
2899 tree *prev_ptr = &clauses;
2901 while (tree probe = *prev_ptr)
2903 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2905 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2906 *prev_ptr = *next_ptr;
2907 else
2908 prev_ptr = next_ptr;
2911 gimple_omp_for_set_clauses (stmt, clauses);
2915 scan_sharing_clauses (clauses, ctx);
/* Scan pre-body, then each collapsed dimension's control operands,
   then the loop body.  */
2917 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2918 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2920 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2921 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2922 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2923 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2925 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2926 return ctx;
2929 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2931 static void
2932 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2933 omp_context *outer_ctx)
/* Replace STMT with a bind containing
     cond = GOMP_USE_SIMT ();
     if (cond) goto lab1; else goto lab2;
     lab1: copy of the loop marked _simt_; goto lab3;
     lab2: the original loop;
     lab3:
   so a later pass can select the SIMT or SIMD version.  */
2935 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2936 gsi_replace (gsi, bind, false);
2937 gimple_seq seq = NULL;
2938 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2939 tree cond = create_tmp_var_raw (integer_type_node);
2940 DECL_CONTEXT (cond) = current_function_decl;
2941 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2942 gimple_bind_set_vars (bind, cond);
2943 gimple_call_set_lhs (g, cond);
2944 gimple_seq_add_stmt (&seq, g);
2945 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2946 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2947 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2948 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2949 gimple_seq_add_stmt (&seq, g);
2950 g = gimple_build_label (lab1);
2951 gimple_seq_add_stmt (&seq, g);
/* The SIMT copy is a deep copy of the loop with an extra _simt_ clause.  */
2952 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2953 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2954 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2955 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2956 gimple_omp_for_set_clauses (new_stmt, clause);
2957 gimple_seq_add_stmt (&seq, new_stmt);
2958 g = gimple_build_goto (lab3);
2959 gimple_seq_add_stmt (&seq, g);
2960 g = gimple_build_label (lab2);
2961 gimple_seq_add_stmt (&seq, g);
2962 gimple_seq_add_stmt (&seq, stmt);
2963 g = gimple_build_label (lab3);
2964 gimple_seq_add_stmt (&seq, g);
2965 gimple_bind_set_body (bind, seq);
2966 update_stmt (bind);
/* Scan both versions; link the SIMT copy from the original's context.  */
2967 scan_omp_for (new_stmt, outer_ctx);
2968 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2971 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2972 struct walk_stmt_info *);
2973 static omp_context *maybe_lookup_ctx (gimple *);
2975 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2976 for scan phase loop. */
2978 static void
2979 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2980 omp_context *outer_ctx)
2982 /* The only change between inclusive and exclusive scan will be
2983 within the first simd loop, so just use inclusive in the
2984 worksharing loop. */
2985 outer_ctx->scan_inclusive = true;
2986 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2987 OMP_CLAUSE_DECL (c) = integer_zero_node;
/* Wrap the original loop in an input-phase GIMPLE_OMP_SCAN and insert
   a scan-phase GIMPLE_OMP_SCAN right after it.  */
2989 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2990 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2991 gsi_replace (gsi, input_stmt, false);
2992 gimple_seq input_body = NULL;
2993 gimple_seq_add_stmt (&input_body, stmt);
2994 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
/* Locate the inner scan directive pair inside the original loop body.  */
2996 gimple_stmt_iterator input1_gsi = gsi_none ();
2997 struct walk_stmt_info wi;
2998 memset (&wi, 0, sizeof (wi));
2999 wi.val_only = true;
3000 wi.info = (void *) &input1_gsi;
3001 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
3002 gcc_assert (!gsi_end_p (input1_gsi));
3004 gimple *input_stmt1 = gsi_stmt (input1_gsi);
3005 gsi_next (&input1_gsi);
3006 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
3007 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
/* For exclusive scan the phases are reversed.  */
3008 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
3009 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
3010 std::swap (input_stmt1, scan_stmt1);
/* Temporarily detach the input-phase body so the copy below only
   duplicates the scan-phase side.  */
3012 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
3013 gimple_omp_set_body (input_stmt1, NULL);
3015 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
3016 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
3018 gimple_omp_set_body (input_stmt1, input_body1);
3019 gimple_omp_set_body (scan_stmt1, NULL);
/* Find the corresponding scan pair in the copied loop and clear its
   input-phase body there.  */
3021 gimple_stmt_iterator input2_gsi = gsi_none ();
3022 memset (&wi, 0, sizeof (wi));
3023 wi.val_only = true;
3024 wi.info = (void *) &input2_gsi;
3025 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
3026 NULL, &wi);
3027 gcc_assert (!gsi_end_p (input2_gsi));
3029 gimple *input_stmt2 = gsi_stmt (input2_gsi);
3030 gsi_next (&input2_gsi);
3031 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
3032 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
3033 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
3034 std::swap (input_stmt2, scan_stmt2);
3036 gimple_omp_set_body (input_stmt2, NULL);
3038 gimple_omp_set_body (input_stmt, input_body);
3039 gimple_omp_set_body (scan_stmt, scan_body);
3041 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
3042 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
3044 ctx = new_omp_context (scan_stmt, outer_ctx);
3045 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
3047 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
3050 /* Scan an OpenMP sections directive. */
3052 static void
3053 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
3055 omp_context *ctx;
3057 ctx = new_omp_context (stmt, outer_ctx);
3058 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
3059 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3062 /* Scan an OpenMP single directive. */
3064 static void
3065 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
3067 omp_context *ctx;
3068 tree name;
3070 ctx = new_omp_context (stmt, outer_ctx);
3071 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3072 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3073 name = create_tmp_var_name (".omp_copy_s");
3074 name = build_decl (gimple_location (stmt),
3075 TYPE_DECL, name, ctx->record_type);
3076 TYPE_NAME (ctx->record_type) = name;
3078 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
3079 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3081 if (TYPE_FIELDS (ctx->record_type) == NULL)
3082 ctx->record_type = NULL;
3083 else
3084 layout_type (ctx->record_type);
3087 /* Scan a GIMPLE_OMP_TARGET. */
3089 static void
3090 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
3092 omp_context *ctx;
3093 tree name;
3094 bool offloaded = is_gimple_omp_offloaded (stmt);
3095 tree clauses = gimple_omp_target_clauses (stmt);
3097 ctx = new_omp_context (stmt, outer_ctx);
3098 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3099 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3100 name = create_tmp_var_name (".omp_data_t");
3101 name = build_decl (gimple_location (stmt),
3102 TYPE_DECL, name, ctx->record_type);
3103 DECL_ARTIFICIAL (name) = 1;
3104 DECL_NAMELESS (name) = 1;
3105 TYPE_NAME (ctx->record_type) = name;
3106 TYPE_ARTIFICIAL (ctx->record_type) = 1;
/* Only actually offloaded constructs (target regions) get a child fn;
   data-mapping constructs reuse the current function.  */
3108 if (offloaded)
3110 create_omp_child_function (ctx, false);
3111 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
3114 scan_sharing_clauses (clauses, ctx);
3115 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3117 if (TYPE_FIELDS (ctx->record_type) == NULL)
3118 ctx->record_type = ctx->receiver_decl = NULL;
3119 else
/* Fields were prepended during scanning; restore clause order.  */
3121 TYPE_FIELDS (ctx->record_type)
3122 = nreverse (TYPE_FIELDS (ctx->record_type));
3123 if (flag_checking)
/* All fields of .omp_data_t must share one alignment.  */
3125 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
3126 for (tree field = TYPE_FIELDS (ctx->record_type);
3127 field;
3128 field = DECL_CHAIN (field))
3129 gcc_assert (DECL_ALIGN (field) == align);
3131 layout_type (ctx->record_type);
3132 if (offloaded)
3133 fixup_child_record_type (ctx);
/* A target with a nested teams must not have directives outside the
   teams; replace the body with an empty bind after diagnosing.  */
3136 if (ctx->teams_nested_p && ctx->nonteams_nested_p)
3138 error_at (gimple_location (stmt),
3139 "%<target%> construct with nested %<teams%> construct "
3140 "contains directives outside of the %<teams%> construct");
3141 gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
3145 /* Scan an OpenMP teams directive. */
3147 static void
3148 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
3150 omp_context *ctx = new_omp_context (stmt, outer_ctx);
3152 if (!gimple_omp_teams_host (stmt))
3154 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3155 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3156 return;
3158 taskreg_contexts.safe_push (ctx);
3159 gcc_assert (taskreg_nesting_level == 1);
3160 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3161 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3162 tree name = create_tmp_var_name (".omp_data_s");
3163 name = build_decl (gimple_location (stmt),
3164 TYPE_DECL, name, ctx->record_type);
3165 DECL_ARTIFICIAL (name) = 1;
3166 DECL_NAMELESS (name) = 1;
3167 TYPE_NAME (ctx->record_type) = name;
3168 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3169 create_omp_child_function (ctx, false);
3170 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
3172 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3173 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3175 if (TYPE_FIELDS (ctx->record_type) == NULL)
3176 ctx->record_type = ctx->receiver_decl = NULL;
/* Check nesting restrictions for STMT, an OpenMP/OpenACC construct (or a
   GOMP builtin call such as cancel/barrier), against CTX, the context of
   its innermost enclosing construct (NULL when not nested in anything).
   Returns true if the nesting is valid; otherwise emits a diagnostic at
   STMT's location and returns false, in which case the caller removes
   the offending statement.  */
static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
      || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
    /* ..., except for the atomic codes that OpenACC shares with OpenMP.  */
    ;
  else if (!(is_gimple_omp (stmt)
	     && is_gimple_omp_oacc (stmt)))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "non-OpenACC construct inside of OpenACC routine");
	  return false;
	}
      else
	/* Walk outward looking for any enclosing OpenACC construct.  */
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (octx->stmt)
	      && is_gimple_omp_oacc (octx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"non-OpenACC construct inside of OpenACC region");
	      return false;
	    }
    }

  /* Checks that depend on the immediately enclosing context.  */
  if (ctx != NULL)
    {
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
	{
	  c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
			       OMP_CLAUSE_DEVICE);
	  if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs are not allowed in target region "
			"with %<ancestor%>");
	      return false;
	    }

	  /* Track whether the target region contains a teams construct
	     and/or any other directive; scan_omp_target diagnoses the
	     invalid combination of both.  */
	  if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
	    ctx->teams_nested_p = true;
	  else
	    ctx->nonteams_nested_p = true;
	}
      /* A scan directly inside a for binds to the loop's context.  */
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
	  && ctx->outer
	  && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	ctx = ctx->outer;
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	  && !ctx->loop_p)
	{
	  c = NULL_TREE;
	  if (ctx->order_concurrent
	      && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs other than %<parallel%>, %<loop%>"
			" or %<simd%> may not be nested inside a region with"
			" the %<order(concurrent)%> clause");
	      return false;
	    }
	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
		{
		  /* ordered threads simd is only OK when closely nested in
		     a combined for simd.  */
		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (ctx->stmt)
			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
		    {
		      error_at (gimple_location (stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<%s simd%> region",
				lang_GNU_Fortran () ? "do" : "for");
		      return false;
		    }
		  return true;
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
		   || gimple_code (stmt) == GIMPLE_OMP_SCAN)
	    return true;
	  else if (gimple_code (stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	    return true;
	  error_at (gimple_location (stmt),
		    "OpenMP constructs other than "
		    "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
		    "not be nested inside %<simd%> region");
	  return false;
	}
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
		   && omp_find_clause (gimple_omp_for_clauses (stmt),
				       OMP_CLAUSE_BIND) == NULL_TREE))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only %<distribute%>, %<parallel%> or %<loop%> "
			"regions are allowed to be strictly nested inside "
			"%<teams%> region");
	      return false;
	    }
	}
      else if (ctx->order_concurrent
	       && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
	       && (gimple_code (stmt) != GIMPLE_OMP_FOR
		   || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
	       && gimple_code (stmt) != GIMPLE_OMP_SCAN)
	{
	  if (ctx->loop_p)
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a %<loop%> region");
	  else
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a region with "
		      "the %<order(concurrent)%> clause");
	  return false;
	}
    }
  /* Restrictions keyed on the code of STMT itself; most walk CTX's
     chain of enclosing constructs outward.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (stmt),
			"%<distribute%> region must be strictly nested "
			"inside %<teams%> construct");
	      return false;
	    }
	  return true;
	}
      /* We split taskloop into task and nested taskloop in it.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	return true;
      /* For now, hope this will change and loop bind(parallel) will not
	 be allowed in lots of contexts.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	  && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
	{
	  bool ok = false;

	  if (ctx)
	    switch (gimple_code (ctx->stmt))
	      {
	      case GIMPLE_OMP_FOR:
		ok = (gimple_omp_for_kind (ctx->stmt)
		      == GF_OMP_FOR_KIND_OACC_LOOP);
		break;

	      case GIMPLE_OMP_TARGET:
		switch (gimple_omp_target_kind (ctx->stmt))
		  {
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
		  case GF_OMP_TARGET_KIND_OACC_SERIAL:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
		    ok = true;
		    break;

		  default:
		    break;
		  }

	      default:
		break;
	      }
	  else if (oacc_get_fn_attrib (current_function_decl))
	    ok = true;
	  if (!ok)
	    {
	      error_at (gimple_location (stmt),
			"OpenACC loop directive must be associated with"
			" an OpenACC compute region");
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_CALL:
      /* Only the GOMP cancel/cancellation-point builtins are checked here;
	 other calls fall through unchecked.  */
      if (is_gimple_call (stmt)
	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		 == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  const char *construct
	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	       == BUILT_IN_GOMP_CANCEL)
	      ? "cancel"
	      : "cancellation point";
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (stmt), "orphaned %qs construct",
			construct);
	      return false;
	    }
	  /* First argument encodes the construct kind being cancelled:
	     1 parallel, 2 for, 4 sections, 8 taskgroup.  */
	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "parallel";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		ctx->cancellable = true;
	      kind = "parallel";
	      break;
	    case 2:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "for";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  ctx->cancellable = true;
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<nowait%> for construct");
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<ordered%> for construct");
		}
	      kind = "for";
	      break;
	    case 4:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "sections";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
								(ctx->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		  else
		    {
		      /* Inside a section, the sections construct is the
			 immediate outer context.  */
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
							(ctx->outer->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		}
	      kind = "sections";
	      break;
	    case 8:
	      if (!is_task_ctx (ctx)
		  && (!is_taskloop_ctx (ctx)
		      || ctx->outer == NULL
		      || !is_task_ctx (ctx->outer)))
		bad = "task";
	      else
		{
		  for (omp_context *octx = ctx->outer;
		       octx; octx = octx->outer)
		    {
		      switch (gimple_code (octx->stmt))
			{
			case GIMPLE_OMP_TASKGROUP:
			  break;
			case GIMPLE_OMP_TARGET:
			  if (gimple_omp_target_kind (octx->stmt)
			      != GF_OMP_TARGET_KIND_REGION)
			    continue;
			  /* FALLTHRU */
			case GIMPLE_OMP_PARALLEL:
			case GIMPLE_OMP_TEAMS:
			  error_at (gimple_location (stmt),
				    "%<%s taskgroup%> construct not closely "
				    "nested inside of %<taskgroup%> region",
				    construct);
			  return false;
			case GIMPLE_OMP_TASK:
			  if (gimple_omp_task_taskloop_p (octx->stmt)
			      && octx->outer
			      && is_taskloop_ctx (octx->outer))
			    {
			      tree clauses
				= gimple_omp_for_clauses (octx->outer->stmt);
			      if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
				break;
			    }
			  continue;
			default:
			  continue;
			}
		      break;
		    }
		  ctx->cancellable = true;
		}
	      kind = "taskgroup";
	      break;
	    default:
	      error_at (gimple_location (stmt), "invalid arguments");
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (stmt),
			"%<%s %s%> construct not closely nested inside of %qs",
			construct, kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_MASKED:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (stmt))
	      {
		/* Only GOMP_barrier among the builtins reaching here is
		   restricted; other builtin calls are fine.  */
		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, %<loop%>, %<critical%>, "
			  "%<ordered%>, %<master%>, %<masked%>, explicit "
			  "%<task%> or %<taskloop%> region");
		return false;
	      }
	    error_at (gimple_location (stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
		      "%<master%>, %<masked%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "%qs region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%> or "
		      "%<taskloop%> region",
		      gimple_code (stmt) == GIMPLE_OMP_MASTER
		      ? "master" : "masked");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_SCOPE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_MASKED:
	    error_at (gimple_location (stmt),
		      "%<scope%> region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%>, "
		      "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
		      "or %<masked%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_TASK:
      /* depend(source)/depend(sink) (i.e. doacross) clauses are only
	 valid on ordered, not on task.  */
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
	  {
	    enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<%s(%s)%> is only allowed in %<omp ordered%>",
		      OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross",
		      kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
	    return false;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	   c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS)
	    {
	      /* A DEPEND clause surviving here means the parser saw an
		 invalid depend kind (valid ones were canonicalized to
		 DOACROSS).  */
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "invalid depend kind in omp %<ordered%> %<depend%>");
		  return false;
		}
	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
	      continue;
	    }

	  tree oclause;
	  /* Look for containing ordered(N) loop.  */
	  if (ctx == NULL
	      || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
	      || (oclause
		  = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				     OMP_CLAUSE_ORDERED)) == NULL_TREE)
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"%<ordered%> construct with %<depend%> clause "
			"must be closely nested inside an %<ordered%> loop");
	      return false;
	    }
	}
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
	{
	  /* ordered simd must be closely nested inside of simd region,
	     and simd region must not encounter constructs other than
	     ordered simd, therefore ordered simd may be either orphaned,
	     or ctx->stmt must be simd.  The latter case is handled already
	     earlier.  */
	  if (ctx != NULL)
	    {
	      error_at (gimple_location (stmt),
			"%<ordered%> %<simd%> must be closely nested inside "
			"%<simd%> region");
	      return false;
	    }
	}
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_ORDERED:
	  ordered_in_taskloop:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region may not be closely nested inside "
		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	      goto ordered_in_taskloop;
	    tree o;
	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				 OMP_CLAUSE_ORDERED);
	    if (o == NULL)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region must be closely nested inside "
			  "a loop region with an %<ordered%> clause");
		return false;
	      }
	    if (!gimple_omp_ordered_standalone_p (stmt))
	      {
		if (OMP_CLAUSE_ORDERED_DOACROSS (o))
		  {
		    error_at (gimple_location (stmt),
			      "%<ordered%> construct without %<doacross%> or "
			      "%<depend%> clauses must not have the same "
			      "binding region as %<ordered%> construct with "
			      "those clauses");
		    return false;
		  }
		else if (OMP_CLAUSE_ORDERED_EXPR (o))
		  {
		    /* ordered(N) must agree with collapse(N) (collapse
		       defaults to 1 when absent).  */
		    tree co
		      = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
					 OMP_CLAUSE_COLLAPSE);
		    HOST_WIDE_INT
		      o_n = tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (o));
		    HOST_WIDE_INT c_n = 1;
		    if (co)
		      c_n = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (co));
		    if (o_n != c_n)
		      {
			error_at (gimple_location (stmt),
				  "%<ordered%> construct without %<doacross%> "
				  "or %<depend%> clauses binds to loop where "
				  "%<collapse%> argument %wd is different from "
				  "%<ordered%> argument %wd", c_n, o_n);
			return false;
		      }
		  }
	      }
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		!= GF_OMP_TARGET_KIND_REGION)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region must be closely nested inside "
		      "a loop region with an %<ordered%> clause");
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
	/* Same-named critical regions must not nest.  */
	tree this_stmt_name
	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
	for (; ctx != NULL; ctx = ctx->outer)
	  if (gomp_critical *other_crit
	      = dyn_cast <gomp_critical *> (ctx->stmt))
	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
	      {
		error_at (gimple_location (stmt),
			  "%<critical%> region may not be nested inside "
			  "a %<critical%> region with the same name");
		return false;
	      }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL)
	break;
      else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	       || (gimple_omp_target_kind (ctx->stmt)
		   != GF_OMP_TARGET_KIND_REGION))
	{
	  /* Teams construct can appear either strictly nested inside of
	     target construct with no intervening stmts, or can be encountered
	     only by initial task (so must not appear inside any OpenMP
	     construct.  */
	  error_at (gimple_location (stmt),
		    "%<teams%> construct must be closely nested inside of "
		    "%<target%> construct or not nested in any OpenMP "
		    "construct");
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
	  {
	    enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "OpenACC construct inside of non-OpenACC region");
		  return false;
		}
	      continue;
	    }

	  /* Map both target kinds to user-facing construct names for the
	     diagnostics below.  */
	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data"; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
	      stmt_name = "enter data"; break;
	    case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
	      stmt_name = "exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
	      break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      stmt_name = "kernels"; break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL:
	      ctx_stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      ctx_stmt_name = "kernels"; break;
	    default: gcc_unreachable ();
	    }

	  /* OpenACC/OpenMP mismatch?  */
	  if (is_gimple_omp_oacc (stmt)
	      != is_gimple_omp_oacc (ctx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"%s %qs construct inside of %s %qs region",
			(is_gimple_omp_oacc (stmt)
			 ? "OpenACC" : "OpenMP"), stmt_name,
			(is_gimple_omp_oacc (ctx->stmt)
			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
	      return false;
	    }
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
		  return false;
		}
	      else
		{
		  /* target with device(ancestor) inside target is allowed
		     without a warning.  */
		  if ((gimple_omp_target_kind (ctx->stmt)
		       == GF_OMP_TARGET_KIND_REGION)
		      && (gimple_omp_target_kind (stmt)
			  == GF_OMP_TARGET_KIND_REGION))
		    {
		      c = omp_find_clause (gimple_omp_target_clauses (stmt),
					   OMP_CLAUSE_DEVICE);
		      if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
			break;
		    }
		  warning_at (gimple_location (stmt), 0,
			      "%qs construct inside of %qs region",
			      stmt_name, ctx_stmt_name);
		}
	    }
	}
      break;
    default:
      break;
    }
  return true;
}
3943 /* Helper function scan_omp.
3945 Callback for walk_tree or operators in walk_gimple_stmt used to
3946 scan for OMP directives in TP. */
3948 static tree
3949 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3951 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3952 omp_context *ctx = (omp_context *) wi->info;
3953 tree t = *tp;
3955 switch (TREE_CODE (t))
3957 case VAR_DECL:
3958 case PARM_DECL:
3959 case LABEL_DECL:
3960 case RESULT_DECL:
3961 if (ctx)
3963 tree repl = remap_decl (t, &ctx->cb);
3964 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3965 *tp = repl;
3967 break;
3969 default:
3970 if (ctx && TYPE_P (t))
3971 *tp = remap_type (t, &ctx->cb);
3972 else if (!DECL_P (t))
3974 *walk_subtrees = 1;
3975 if (ctx)
3977 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3978 if (tem != TREE_TYPE (t))
3980 if (TREE_CODE (t) == INTEGER_CST)
3981 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3982 else
3983 TREE_TYPE (t) = tem;
3987 break;
3990 return NULL_TREE;
3993 /* Return true if FNDECL is a setjmp or a longjmp. */
3995 static bool
3996 setjmp_or_longjmp_p (const_tree fndecl)
3998 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP, BUILT_IN_LONGJMP))
3999 return true;
4001 tree declname = DECL_NAME (fndecl);
4002 if (!declname
4003 || (DECL_CONTEXT (fndecl) != NULL_TREE
4004 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
4005 || !TREE_PUBLIC (fndecl))
4006 return false;
4008 const char *name = IDENTIFIER_POINTER (declname);
4009 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
4012 /* Return true if FNDECL is an omp_* runtime API call. */
4014 static bool
4015 omp_runtime_api_call (const_tree fndecl)
4017 tree declname = DECL_NAME (fndecl);
4018 if (!declname
4019 || (DECL_CONTEXT (fndecl) != NULL_TREE
4020 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
4021 || !TREE_PUBLIC (fndecl))
4022 return false;
4024 const char *name = IDENTIFIER_POINTER (declname);
4025 if (!startswith (name, "omp_"))
4026 return false;
4028 static const char *omp_runtime_apis[] =
4030 /* This array has 3 sections. First omp_* calls that don't
4031 have any suffixes. */
4032 "aligned_alloc",
4033 "aligned_calloc",
4034 "alloc",
4035 "calloc",
4036 "free",
4037 "get_mapped_ptr",
4038 "realloc",
4039 "target_alloc",
4040 "target_associate_ptr",
4041 "target_disassociate_ptr",
4042 "target_free",
4043 "target_is_accessible",
4044 "target_is_present",
4045 "target_memcpy",
4046 "target_memcpy_async",
4047 "target_memcpy_rect",
4048 "target_memcpy_rect_async",
4049 NULL,
4050 /* Now omp_* calls that are available as omp_* and omp_*_; however, the
4051 DECL_NAME is always omp_* without tailing underscore. */
4052 "capture_affinity",
4053 "destroy_allocator",
4054 "destroy_lock",
4055 "destroy_nest_lock",
4056 "display_affinity",
4057 "fulfill_event",
4058 "get_active_level",
4059 "get_affinity_format",
4060 "get_cancellation",
4061 "get_default_allocator",
4062 "get_default_device",
4063 "get_device_num",
4064 "get_dynamic",
4065 "get_initial_device",
4066 "get_level",
4067 "get_max_active_levels",
4068 "get_max_task_priority",
4069 "get_max_teams",
4070 "get_max_threads",
4071 "get_nested",
4072 "get_num_devices",
4073 "get_num_places",
4074 "get_num_procs",
4075 "get_num_teams",
4076 "get_num_threads",
4077 "get_partition_num_places",
4078 "get_place_num",
4079 "get_proc_bind",
4080 "get_supported_active_levels",
4081 "get_team_num",
4082 "get_teams_thread_limit",
4083 "get_thread_limit",
4084 "get_thread_num",
4085 "get_wtick",
4086 "get_wtime",
4087 "in_explicit_task",
4088 "in_final",
4089 "in_parallel",
4090 "init_lock",
4091 "init_nest_lock",
4092 "is_initial_device",
4093 "pause_resource",
4094 "pause_resource_all",
4095 "set_affinity_format",
4096 "set_default_allocator",
4097 "set_lock",
4098 "set_nest_lock",
4099 "test_lock",
4100 "test_nest_lock",
4101 "unset_lock",
4102 "unset_nest_lock",
4103 NULL,
4104 /* And finally calls available as omp_*, omp_*_ and omp_*_8_; however,
4105 as DECL_NAME only omp_* and omp_*_8 appear. */
4106 "display_env",
4107 "get_ancestor_thread_num",
4108 "init_allocator",
4109 "get_partition_place_nums",
4110 "get_place_num_procs",
4111 "get_place_proc_ids",
4112 "get_schedule",
4113 "get_team_size",
4114 "set_default_device",
4115 "set_dynamic",
4116 "set_max_active_levels",
4117 "set_nested",
4118 "set_num_teams",
4119 "set_num_threads",
4120 "set_schedule",
4121 "set_teams_thread_limit"
4124 int mode = 0;
4125 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
4127 if (omp_runtime_apis[i] == NULL)
4129 mode++;
4130 continue;
4132 size_t len = strlen (omp_runtime_apis[i]);
4133 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
4134 && (name[4 + len] == '\0'
4135 || (mode > 1 && strcmp (name + 4 + len, "_8") == 0)))
4136 return true;
4138 return false;
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* The walk_stmt_info carries the innermost enclosing omp_context
     (NULL at the outermost level).  */
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  A statement that violates them is
     diagnosed here and replaced by a GIMPLE_NOP below.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  /* setjmp/longjmp cannot appear directly inside a simd region
	     (ctx->loop_p means we are inside a loop construct body where
	     this is permitted).  */
	  if (ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	      && setjmp_or_longjmp_p (fndecl)
	      && !ctx->loop_p)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside %<simd%> construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      /* These libgomp builtins are subject to the same nesting
		 rules as the corresponding OMP directives.  */
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	  else if (ctx)
	    {
	      /* For calls in a scan region, restrictions are checked
		 against the enclosing construct.  */
	      omp_context *octx = ctx;
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
		octx = ctx->outer;
	      if (octx->order_concurrent && omp_runtime_api_call (fndecl))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD in a region with "
			    "%<order(concurrent)%> clause", fndecl);
		}
	      /* Only omp_get_num_teams and omp_get_team_num may be called
		 strictly nested inside a teams region.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
		  && omp_runtime_api_call (fndecl)
		  && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
		       != strlen ("omp_get_num_teams"))
		      || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
				 "omp_get_num_teams") != 0)
		  && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
		       != strlen ("omp_get_team_num"))
		      || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
				 "omp_get_team_num") != 0))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD strictly nested in a "
			    "%<teams%> region", fndecl);
		}
	      /* Runtime API calls are not allowed in reverse-offload
		 (device(ancestor)) target regions; diagnose but do not
		 remove the call.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (ctx->stmt)
		      == GF_OMP_TARGET_KIND_REGION)
		  && omp_runtime_api_call (fndecl))
		{
		  tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
		  tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
		  if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
		    error_at (gimple_location (stmt),
			      "OpenMP runtime API call %qD in a region with "
			      "%<device(ancestor)%> clause", fndecl);
		}
	    }
	}
    }
  if (remove)
    {
      /* Replace the offending statement with a no-op so that lowering
	 can continue after the error.  */
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      /* A combined simd with an inscan reduction gets special scanning
	 unless an error was already reported.  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && gimple_omp_for_combined_into_p (stmt)
	  && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
	{
	  tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
	  tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
	  if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
	    {
	      scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
	      break;
	    }
	}
      /* Non-collapsed simd loops in maybe-offloaded contexts may be
	 transformed for SIMT execution.  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ()
	  && gimple_omp_for_collapse (stmt) == 1)
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCOPE:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCAN:
      /* Record whether the scan is inclusive or exclusive on its
	 context, then handle the body like the other simple regions.  */
      if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
	{
	  if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
	    ctx->scan_inclusive = true;
	  else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
	    ctx->scan_exclusive = true;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_MASKED:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      /* Offloaded target regions nest like parallel/task for the
	 purpose of taskreg_nesting_level bookkeeping.  */
      if (is_gimple_omp_offloaded (stmt))
	{
	  taskreg_nesting_level++;
	  scan_omp_target (as_a <gomp_target *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      /* Host teams regions likewise count as taskreg nesting.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	/* Let the walker descend into the bind body; just make each
	   bound variable map to itself in the copy table so remapping
	   is a no-op for them.  */
	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
4365 /* Scan all the statements starting at the current statement. CTX
4366 contains context information about the OMP directives and
4367 clauses found during the scan. */
4369 static void
4370 scan_omp (gimple_seq *body_p, omp_context *ctx)
4372 location_t saved_location;
4373 struct walk_stmt_info wi;
4375 memset (&wi, 0, sizeof (wi));
4376 wi.info = ctx;
4377 wi.want_locations = true;
4379 saved_location = input_location;
4380 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4381 input_location = saved_location;
4384 /* Re-gimplification and code generation routines. */
4386 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4387 of BIND if in a method. */
4389 static void
4390 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4392 if (DECL_ARGUMENTS (current_function_decl)
4393 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4394 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4395 == POINTER_TYPE))
4397 tree vars = gimple_bind_vars (bind);
4398 for (tree *pvar = &vars; *pvar; )
4399 if (omp_member_access_dummy_var (*pvar))
4400 *pvar = DECL_CHAIN (*pvar);
4401 else
4402 pvar = &DECL_CHAIN (*pvar);
4403 gimple_bind_set_vars (bind, vars);
4407 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4408 block and its subblocks. */
4410 static void
4411 remove_member_access_dummy_vars (tree block)
4413 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4414 if (omp_member_access_dummy_var (*pvar))
4415 *pvar = DECL_CHAIN (*pvar);
4416 else
4417 pvar = &DECL_CHAIN (*pvar);
4419 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4420 remove_member_access_dummy_vars (block);
4423 /* If a context was created for STMT when it was scanned, return it. */
4425 static omp_context *
4426 maybe_lookup_ctx (gimple *stmt)
4428 splay_tree_node n;
4429 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4430 return n ? (omp_context *) n->value : NULL;
4434 /* Find the mapping for DECL in CTX or the immediately enclosing
4435 context that has a mapping for DECL.
4437 If CTX is a nested parallel directive, we may have to use the decl
4438 mappings created in CTX's parent context. Suppose that we have the
4439 following parallel nesting (variable UIDs showed for clarity):
4441 iD.1562 = 0;
4442 #omp parallel shared(iD.1562) -> outer parallel
4443 iD.1562 = iD.1562 + 1;
4445 #omp parallel shared (iD.1562) -> inner parallel
4446 iD.1562 = iD.1562 - 1;
4448 Each parallel structure will create a distinct .omp_data_s structure
4449 for copying iD.1562 in/out of the directive:
4451 outer parallel .omp_data_s.1.i -> iD.1562
4452 inner parallel .omp_data_s.2.i -> iD.1562
4454 A shared variable mapping will produce a copy-out operation before
4455 the parallel directive and a copy-in operation after it. So, in
4456 this case we would have:
4458 iD.1562 = 0;
4459 .omp_data_o.1.i = iD.1562;
4460 #omp parallel shared(iD.1562) -> outer parallel
4461 .omp_data_i.1 = &.omp_data_o.1
4462 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4464 .omp_data_o.2.i = iD.1562; -> **
4465 #omp parallel shared(iD.1562) -> inner parallel
4466 .omp_data_i.2 = &.omp_data_o.2
4467 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4470 ** This is a problem. The symbol iD.1562 cannot be referenced
4471 inside the body of the outer parallel region. But since we are
4472 emitting this copy operation while expanding the inner parallel
4473 directive, we need to access the CTX structure of the outer
4474 parallel directive to get the correct mapping:
4476 .omp_data_o.2.i = .omp_data_i.1->i
4478 Since there may be other workshare or parallel directives enclosing
4479 the parallel directive, it may be necessary to walk up the context
4480 parent chain. This is not a problem in general because nested
4481 parallelism happens only rarely. */
4483 static tree
4484 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4486 tree t;
4487 omp_context *up;
4489 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4490 t = maybe_lookup_decl (decl, up);
4492 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4494 return t ? t : decl;
4498 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4499 in outer contexts. */
4501 static tree
4502 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4504 tree t = NULL;
4505 omp_context *up;
4507 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4508 t = maybe_lookup_decl (decl, up);
4510 return t ? t : decl;
/* Construct the initialization value for reduction operation OP of
   type TYPE at location LOC: the identity element of OP, i.e. the
   value X such that X op Y == Y for all Y of TYPE.  */

tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    /* Identity 0.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    /* Identity 1.  */
    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    /* Identity all-ones.  */
    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      /* Identity is the smallest representable value: -inf when the
	 FP type honors infinities, else the most negative finite
	 value; TYPE_MIN_VALUE for integers and pointers.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE min;
	  if (HONOR_INFINITIES (type))
	    real_arithmetic (&min, NEGATE_EXPR, &dconstinf, NULL);
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      /* Symmetric to MAX_EXPR: the largest representable value.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    max = dconstinf;
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
4589 /* Construct the initialization value for reduction CLAUSE. */
4591 tree
4592 omp_reduction_init (tree clause, tree type)
4594 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4595 OMP_CLAUSE_REDUCTION_CODE (clause), type);
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  If the clause specifies an alignment, that is
   used; otherwise the implementation-defined default is the largest
   unit alignment of any vector type the target would prefer for
   autovectorization of a scalar integer or float mode.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  auto_vector_modes modes;
  targetm.vectorize.autovectorize_vector_modes (&modes, true);
  /* Pairs of (scalar class, corresponding vector class); the i += 2
     loop below visits only the scalar members.  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	/* Prefer the widest related vector mode among the target's
	   autovectorization candidates.  */
	machine_mode alt_vmode;
	for (unsigned int j = 0; j < modes.length (); ++j)
	  if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
	      && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
	    vmode = alt_vmode;

	/* Skip modes the frontend cannot express as a type, or whose
	   vector type does not actually use the requested mode.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type_for_mode (type, vmode);
	if (TYPE_MODE (type) != vmode)
	  continue;
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

class omplow_simd_context {
public:
  /* Zero-initialize all members (the class is POD-like by design).  */
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  /* Index variable used to subscript the per-lane "omp simd array"
     temporaries.  */
  tree idx;
  /* Current SIMD lane variable.  */
  tree lane;
  /* Lane holding the last-iteration value, lazily created for inscan
     reductions.  */
  tree lastlane;
  /* Extra arguments accumulated for the SIMT entry IFN.  */
  vec<tree, va_heap> simt_eargs;
  /* Statements to emit in the SIMT destructor sequence.  */
  gimple_seq simt_dlist;
  /* Maximum vectorization factor; 0 means not yet computed, 1 means
     SIMD privatization is disabled.  */
  poly_uint64_pod max_vf;
  /* True when lowering for SIMT rather than SIMD.  */
  bool is_simt;
};
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  Set up per-SIMD-lane storage for NEW_VAR in context
   CTX, recording state in SCTX.  On success, IVAR is set to the
   per-iteration reference and LVAR to the per-lane reference; for
   inscan reductions *RVAR (and for exclusive scan *RVAR2) receive
   references into additional array temporaries.  Returns false when
   max_vf == 1, i.e. when no per-lane privatization is performed.  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar,
			      tree &lvar, tree *rvar = NULL,
			      tree *rvar2 = NULL)
{
  /* First call: compute max_vf once for the whole construct.  */
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  /* safelen(N) caps the usable vectorization factor.  */
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
	{
	  for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
	       c = OMP_CLAUSE_CHAIN (c))
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		continue;

	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  /* UDR reductions are not supported yet for SIMT, disable
		     SIMT.  */
		  sctx->max_vf = 1;
		  break;
		}

	      if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
		  && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
		{
		  /* Doing boolean operations on non-integral types is
		     for conformance only, it's not worth supporting this
		     for SIMT.  */
		  sctx->max_vf = 1;
		  break;
		}
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      /* For SIMT, registers are already per-lane; only addressable
	 variables need a private copy with the "omp simt private"
	 attribute and a clobber in the destructor sequence.  */
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_clobber (type);
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      /* For SIMD, privatize via an "omp simd array" of max_vf
	 elements, indexed by sctx->idx / sctx->lane.  */
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      tree iavar = avar;
      if (rvar && !ctx->for_simd_scan_phase)
	{
	  /* For inscan reductions, create another array temporary,
	     which will hold the reduced value.  */
	  iavar = create_tmp_var_raw (atype);
	  if (TREE_ADDRESSABLE (new_var))
	    TREE_ADDRESSABLE (iavar) = 1;
	  DECL_ATTRIBUTES (iavar)
	    = tree_cons (get_identifier ("omp simd array"), NULL,
			 tree_cons (get_identifier ("omp simd inscan"), NULL,
				    DECL_ATTRIBUTES (iavar)));
	  gimple_add_tmp_var (iavar);
	  ctx->cb.decl_map->put (avar, iavar);
	  if (sctx->lastlane == NULL_TREE)
	    sctx->lastlane = create_tmp_var (unsigned_type_node);
	  *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
			  sctx->lastlane, NULL_TREE, NULL_TREE);
	  TREE_THIS_NOTRAP (*rvar) = 1;

	  if (ctx->scan_exclusive)
	    {
	      /* And for exclusive scan yet another one, which will
		 hold the value during the scan phase.  */
	      tree savar = create_tmp_var_raw (atype);
	      if (TREE_ADDRESSABLE (new_var))
		TREE_ADDRESSABLE (savar) = 1;
	      DECL_ATTRIBUTES (savar)
		= tree_cons (get_identifier ("omp simd array"), NULL,
			     tree_cons (get_identifier ("omp simd inscan "
							"exclusive"), NULL,
					DECL_ATTRIBUTES (savar)));
	      gimple_add_tmp_var (savar);
	      ctx->cb.decl_map->put (iavar, savar);
	      *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
			       sctx->idx, NULL_TREE, NULL_TREE);
	      TREE_THIS_NOTRAP (*rvar2) = 1;
	    }
	}
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  /* Make uses of NEW_VAR in the body resolve to the per-lane copy.  */
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
4799 /* Helper function of lower_rec_input_clauses. For a reference
4800 in simd reduction, add an underlying variable it will reference. */
4802 static void
4803 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4805 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4806 if (TREE_CONSTANT (z))
4808 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4809 get_name (new_vard));
4810 gimple_add_tmp_var (z);
4811 TREE_ADDRESSABLE (z) = 1;
4812 z = build_fold_addr_expr_loc (loc, z);
4813 gimplify_assign (new_vard, z, ilist);
4817 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4818 code to emit (type) (tskred_temp[idx]). */
4820 static tree
4821 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4822 unsigned idx)
4824 unsigned HOST_WIDE_INT sz
4825 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4826 tree r = build2 (MEM_REF, pointer_sized_int_node,
4827 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4828 idx * sz));
4829 tree v = create_tmp_var (pointer_sized_int_node);
4830 gimple *g = gimple_build_assign (v, r);
4831 gimple_seq_add_stmt (ilist, g);
4832 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4834 v = create_tmp_var (type);
4835 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4836 gimple_seq_add_stmt (ilist, g);
4838 return v;
/* Lower early initialization of privatized variable NEW_VAR
   if it needs an allocator (has allocate clause).  VAR is the clause's
   original decl used as the allocate_map key; ALLOCATOR and
   ALLOCATE_PTR are in/out: on success ALLOCATOR holds the evaluated
   allocator handle and ALLOCATE_PTR the pointer returned by
   GOMP_alloc.  IS_REF is true when NEW_VAR is the pointer of a
   by-reference privatization; SIZE, if non-NULL, overrides the
   allocation size.  Returns true when an allocation was emitted.  */

static bool
lower_private_allocate (tree var, tree new_var, tree &allocator,
			tree &allocate_ptr, gimple_seq *ilist,
			omp_context *ctx, bool is_ref, tree size)
{
  /* Caller may already have resolved the allocator.  */
  if (allocator)
    return false;
  gcc_assert (allocate_ptr == NULL_TREE);
  if (ctx->allocate_map
      && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
    if (tree *allocatorp = ctx->allocate_map->get (var))
      allocator = *allocatorp;
  if (allocator == NULL_TREE)
    return false;
  /* By-reference variables are handled on the is_ref call; bail out
     (and reset) when called for the non-reference pass.  */
  if (!is_ref && omp_privatize_by_reference (var))
    {
      allocator = NULL_TREE;
      return false;
    }

  /* A TREE_LIST allocator carries an explicit align() modifier as its
     TREE_VALUE, the allocator itself as TREE_PURPOSE.  */
  unsigned HOST_WIDE_INT ialign = 0;
  if (TREE_CODE (allocator) == TREE_LIST)
    {
      ialign = tree_to_uhwi (TREE_VALUE (allocator));
      allocator = TREE_PURPOSE (allocator);
    }
  if (TREE_CODE (allocator) != INTEGER_CST)
    allocator = build_outer_var_ref (allocator, ctx, OMP_CLAUSE_ALLOCATE);
  allocator = fold_convert (pointer_sized_int_node, allocator);
  if (TREE_CODE (allocator) != INTEGER_CST)
    {
      /* Evaluate a non-constant allocator once into a temporary.  */
      tree var = create_tmp_var (TREE_TYPE (allocator));
      gimplify_assign (var, allocator, ilist);
      allocator = var;
    }

  /* Determine pointer type, alignment and size depending on whether
     NEW_VAR is a type, a reference pointer, or a plain decl.  */
  tree ptr_type, align, sz = size;
  if (TYPE_P (new_var))
    {
      ptr_type = build_pointer_type (new_var);
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
    }
  else if (is_ref)
    {
      ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
    }
  else
    {
      ptr_type = build_pointer_type (TREE_TYPE (new_var));
      ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
      if (sz == NULL_TREE)
	sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
    }
  align = build_int_cst (size_type_node, ialign);
  if (TREE_CODE (sz) != INTEGER_CST)
    {
      tree szvar = create_tmp_var (size_type_node);
      gimplify_assign (szvar, sz, ilist);
      sz = szvar;
    }
  /* Emit allocate_ptr = GOMP_alloc (align, sz, allocator).  */
  allocate_ptr = create_tmp_var (ptr_type);
  tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
  gimple *g = gimple_build_call (a, 3, align, sz, allocator);
  gimple_call_set_lhs (g, allocate_ptr);
  gimple_seq_add_stmt (ilist, g);
  if (!is_ref)
    {
      /* Redirect uses of NEW_VAR through the allocated storage.  */
      tree x = build_simple_mem_ref (allocate_ptr);
      TREE_THIS_NOTRAP (x) = 1;
      SET_DECL_VALUE_EXPR (new_var, x);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
4920 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4921 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4922 private variables. Initialization statements go in ILIST, while calls
4923 to destructors go in DLIST. */
4925 static void
4926 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4927 omp_context *ctx, struct omp_for_data *fd)
4929 tree c, copyin_seq, x, ptr;
4930 bool copyin_by_ref = false;
4931 bool lastprivate_firstprivate = false;
4932 bool reduction_omp_orig_ref = false;
4933 int pass;
4934 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4935 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4936 omplow_simd_context sctx = omplow_simd_context ();
4937 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4938 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4939 gimple_seq llist[4] = { };
4940 tree nonconst_simd_if = NULL_TREE;
4942 copyin_seq = NULL;
4943 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4945 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4946 with data sharing clauses referencing variable sized vars. That
4947 is unnecessarily hard to support and very unlikely to result in
4948 vectorized code anyway. */
4949 if (is_simd)
4950 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4951 switch (OMP_CLAUSE_CODE (c))
4953 case OMP_CLAUSE_LINEAR:
4954 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4955 sctx.max_vf = 1;
4956 /* FALLTHRU */
4957 case OMP_CLAUSE_PRIVATE:
4958 case OMP_CLAUSE_FIRSTPRIVATE:
4959 case OMP_CLAUSE_LASTPRIVATE:
4960 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4961 sctx.max_vf = 1;
4962 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4964 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4965 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4966 sctx.max_vf = 1;
4968 break;
4969 case OMP_CLAUSE_REDUCTION:
4970 case OMP_CLAUSE_IN_REDUCTION:
4971 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4972 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4973 sctx.max_vf = 1;
4974 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4976 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4977 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4978 sctx.max_vf = 1;
4980 break;
4981 case OMP_CLAUSE_IF:
4982 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4983 sctx.max_vf = 1;
4984 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4985 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4986 break;
4987 case OMP_CLAUSE_SIMDLEN:
4988 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4989 sctx.max_vf = 1;
4990 break;
4991 case OMP_CLAUSE__CONDTEMP_:
4992 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4993 if (sctx.is_simt)
4994 sctx.max_vf = 1;
4995 break;
4996 default:
4997 continue;
5000 /* Add a placeholder for simduid. */
5001 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
5002 sctx.simt_eargs.safe_push (NULL_TREE);
5004 unsigned task_reduction_cnt = 0;
5005 unsigned task_reduction_cntorig = 0;
5006 unsigned task_reduction_cnt_full = 0;
5007 unsigned task_reduction_cntorig_full = 0;
5008 unsigned task_reduction_other_cnt = 0;
5009 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
5010 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
5011 /* Do all the fixed sized types in the first pass, and the variable sized
5012 types in the second pass. This makes sure that the scalar arguments to
5013 the variable sized types are processed before we use them in the
5014 variable sized operations. For task reductions we use 4 passes, in the
5015 first two we ignore them, in the third one gather arguments for
5016 GOMP_task_reduction_remap call and in the last pass actually handle
5017 the task reductions. */
5018 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
5019 ? 4 : 2); ++pass)
5021 if (pass == 2 && task_reduction_cnt)
5023 tskred_atype
5024 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
5025 + task_reduction_cntorig);
5026 tskred_avar = create_tmp_var_raw (tskred_atype);
5027 gimple_add_tmp_var (tskred_avar);
5028 TREE_ADDRESSABLE (tskred_avar) = 1;
5029 task_reduction_cnt_full = task_reduction_cnt;
5030 task_reduction_cntorig_full = task_reduction_cntorig;
5032 else if (pass == 3 && task_reduction_cnt)
5034 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
5035 gimple *g
5036 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
5037 size_int (task_reduction_cntorig),
5038 build_fold_addr_expr (tskred_avar));
5039 gimple_seq_add_stmt (ilist, g);
5041 if (pass == 3 && task_reduction_other_cnt)
5043 /* For reduction clauses, build
5044 tskred_base = (void *) tskred_temp[2]
5045 + omp_get_thread_num () * tskred_temp[1]
5046 or if tskred_temp[1] is known to be constant, that constant
5047 directly. This is the start of the private reduction copy block
5048 for the current thread. */
5049 tree v = create_tmp_var (integer_type_node);
5050 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
5051 gimple *g = gimple_build_call (x, 0);
5052 gimple_call_set_lhs (g, v);
5053 gimple_seq_add_stmt (ilist, g);
5054 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
5055 tskred_temp = OMP_CLAUSE_DECL (c);
5056 if (is_taskreg_ctx (ctx))
5057 tskred_temp = lookup_decl (tskred_temp, ctx);
5058 tree v2 = create_tmp_var (sizetype);
5059 g = gimple_build_assign (v2, NOP_EXPR, v);
5060 gimple_seq_add_stmt (ilist, g);
5061 if (ctx->task_reductions[0])
5062 v = fold_convert (sizetype, ctx->task_reductions[0]);
5063 else
5064 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
5065 tree v3 = create_tmp_var (sizetype);
5066 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
5067 gimple_seq_add_stmt (ilist, g);
5068 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
5069 tskred_base = create_tmp_var (ptr_type_node);
5070 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
5071 gimple_seq_add_stmt (ilist, g);
5073 task_reduction_cnt = 0;
5074 task_reduction_cntorig = 0;
5075 task_reduction_other_cnt = 0;
5076 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5078 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
5079 tree var, new_var;
5080 bool by_ref;
5081 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5082 bool task_reduction_p = false;
5083 bool task_reduction_needs_orig_p = false;
5084 tree cond = NULL_TREE;
5085 tree allocator, allocate_ptr;
5087 switch (c_kind)
5089 case OMP_CLAUSE_PRIVATE:
5090 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
5091 continue;
5092 break;
5093 case OMP_CLAUSE_SHARED:
5094 /* Ignore shared directives in teams construct inside
5095 of target construct. */
5096 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5097 && !is_host_teams_ctx (ctx))
5098 continue;
5099 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
5101 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
5102 || is_global_var (OMP_CLAUSE_DECL (c)));
5103 continue;
5105 case OMP_CLAUSE_FIRSTPRIVATE:
5106 case OMP_CLAUSE_COPYIN:
5107 break;
5108 case OMP_CLAUSE_LINEAR:
5109 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
5110 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5111 lastprivate_firstprivate = true;
5112 break;
5113 case OMP_CLAUSE_REDUCTION:
5114 case OMP_CLAUSE_IN_REDUCTION:
5115 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5116 || is_task_ctx (ctx)
5117 || OMP_CLAUSE_REDUCTION_TASK (c))
5119 task_reduction_p = true;
5120 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5122 task_reduction_other_cnt++;
5123 if (pass == 2)
5124 continue;
5126 else
5127 task_reduction_cnt++;
5128 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5130 var = OMP_CLAUSE_DECL (c);
5131 /* If var is a global variable that isn't privatized
5132 in outer contexts, we don't need to look up the
5133 original address, it is always the address of the
5134 global variable itself. */
5135 if (!DECL_P (var)
5136 || omp_privatize_by_reference (var)
5137 || !is_global_var
5138 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5140 task_reduction_needs_orig_p = true;
5141 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5142 task_reduction_cntorig++;
5146 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5147 reduction_omp_orig_ref = true;
5148 break;
5149 case OMP_CLAUSE__REDUCTEMP_:
5150 if (!is_taskreg_ctx (ctx))
5151 continue;
5152 /* FALLTHRU */
5153 case OMP_CLAUSE__LOOPTEMP_:
5154 /* Handle _looptemp_/_reductemp_ clauses only on
5155 parallel/task. */
5156 if (fd)
5157 continue;
5158 break;
5159 case OMP_CLAUSE_LASTPRIVATE:
5160 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5162 lastprivate_firstprivate = true;
5163 if (pass != 0 || is_taskloop_ctx (ctx))
5164 continue;
5166 /* Even without corresponding firstprivate, if
5167 decl is Fortran allocatable, it needs outer var
5168 reference. */
5169 else if (pass == 0
5170 && lang_hooks.decls.omp_private_outer_ref
5171 (OMP_CLAUSE_DECL (c)))
5172 lastprivate_firstprivate = true;
5173 break;
5174 case OMP_CLAUSE_ALIGNED:
5175 if (pass != 1)
5176 continue;
5177 var = OMP_CLAUSE_DECL (c);
5178 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5179 && !is_global_var (var))
5181 new_var = maybe_lookup_decl (var, ctx);
5182 if (new_var == NULL_TREE)
5183 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
5184 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5185 tree alarg = omp_clause_aligned_alignment (c);
5186 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5187 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5188 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5189 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5190 gimplify_and_add (x, ilist);
5192 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5193 && is_global_var (var))
5195 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5196 new_var = lookup_decl (var, ctx);
5197 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
5198 t = build_fold_addr_expr_loc (clause_loc, t);
5199 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5200 tree alarg = omp_clause_aligned_alignment (c);
5201 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5202 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5203 t = fold_convert_loc (clause_loc, ptype, t);
5204 x = create_tmp_var (ptype);
5205 t = build2 (MODIFY_EXPR, ptype, x, t);
5206 gimplify_and_add (t, ilist);
5207 t = build_simple_mem_ref_loc (clause_loc, x);
5208 SET_DECL_VALUE_EXPR (new_var, t);
5209 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5211 continue;
5212 case OMP_CLAUSE__CONDTEMP_:
5213 if (is_parallel_ctx (ctx)
5214 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5215 break;
5216 continue;
5217 default:
5218 continue;
5221 if (task_reduction_p != (pass >= 2))
5222 continue;
5224 allocator = NULL_TREE;
5225 allocate_ptr = NULL_TREE;
5226 new_var = var = OMP_CLAUSE_DECL (c);
5227 if ((c_kind == OMP_CLAUSE_REDUCTION
5228 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5229 && TREE_CODE (var) == MEM_REF)
5231 var = TREE_OPERAND (var, 0);
5232 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5233 var = TREE_OPERAND (var, 0);
5234 if (TREE_CODE (var) == INDIRECT_REF
5235 || TREE_CODE (var) == ADDR_EXPR)
5236 var = TREE_OPERAND (var, 0);
5237 if (is_variable_sized (var))
5239 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5240 var = DECL_VALUE_EXPR (var);
5241 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5242 var = TREE_OPERAND (var, 0);
5243 gcc_assert (DECL_P (var));
5245 new_var = var;
5247 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5249 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5250 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5252 else if (c_kind != OMP_CLAUSE_COPYIN)
5253 new_var = lookup_decl (var, ctx);
5255 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5257 if (pass != 0)
5258 continue;
5260 /* C/C++ array section reductions. */
5261 else if ((c_kind == OMP_CLAUSE_REDUCTION
5262 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5263 && var != OMP_CLAUSE_DECL (c))
5265 if (pass == 0)
5266 continue;
5268 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5269 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5271 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5273 tree b = TREE_OPERAND (orig_var, 1);
5274 if (is_omp_target (ctx->stmt))
5275 b = NULL_TREE;
5276 else
5277 b = maybe_lookup_decl (b, ctx);
5278 if (b == NULL)
5280 b = TREE_OPERAND (orig_var, 1);
5281 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5283 if (integer_zerop (bias))
5284 bias = b;
5285 else
5287 bias = fold_convert_loc (clause_loc,
5288 TREE_TYPE (b), bias);
5289 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5290 TREE_TYPE (b), b, bias);
5292 orig_var = TREE_OPERAND (orig_var, 0);
5294 if (pass == 2)
5296 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5297 if (is_global_var (out)
5298 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5299 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5300 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5301 != POINTER_TYPE)))
5302 x = var;
5303 else if (is_omp_target (ctx->stmt))
5304 x = out;
5305 else
5307 bool by_ref = use_pointer_for_field (var, NULL);
5308 x = build_receiver_ref (var, by_ref, ctx);
5309 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5310 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5311 == POINTER_TYPE))
5312 x = build_fold_addr_expr (x);
5314 if (TREE_CODE (orig_var) == INDIRECT_REF)
5315 x = build_simple_mem_ref (x);
5316 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5318 if (var == TREE_OPERAND (orig_var, 0))
5319 x = build_fold_addr_expr (x);
5321 bias = fold_convert (sizetype, bias);
5322 x = fold_convert (ptr_type_node, x);
5323 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5324 TREE_TYPE (x), x, bias);
5325 unsigned cnt = task_reduction_cnt - 1;
5326 if (!task_reduction_needs_orig_p)
5327 cnt += (task_reduction_cntorig_full
5328 - task_reduction_cntorig);
5329 else
5330 cnt = task_reduction_cntorig - 1;
5331 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5332 size_int (cnt), NULL_TREE, NULL_TREE);
5333 gimplify_assign (r, x, ilist);
5334 continue;
5337 if (TREE_CODE (orig_var) == INDIRECT_REF
5338 || TREE_CODE (orig_var) == ADDR_EXPR)
5339 orig_var = TREE_OPERAND (orig_var, 0);
5340 tree d = OMP_CLAUSE_DECL (c);
5341 tree type = TREE_TYPE (d);
5342 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5343 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5344 tree sz = v;
5345 const char *name = get_name (orig_var);
5346 if (pass != 3 && !TREE_CONSTANT (v))
5348 tree t;
5349 if (is_omp_target (ctx->stmt))
5350 t = NULL_TREE;
5351 else
5352 t = maybe_lookup_decl (v, ctx);
5353 if (t)
5354 v = t;
5355 else
5356 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5357 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5358 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5359 TREE_TYPE (v), v,
5360 build_int_cst (TREE_TYPE (v), 1));
5361 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5362 TREE_TYPE (v), t,
5363 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5365 if (pass == 3)
5367 tree xv = create_tmp_var (ptr_type_node);
5368 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5370 unsigned cnt = task_reduction_cnt - 1;
5371 if (!task_reduction_needs_orig_p)
5372 cnt += (task_reduction_cntorig_full
5373 - task_reduction_cntorig);
5374 else
5375 cnt = task_reduction_cntorig - 1;
5376 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5377 size_int (cnt), NULL_TREE, NULL_TREE);
5379 gimple *g = gimple_build_assign (xv, x);
5380 gimple_seq_add_stmt (ilist, g);
5382 else
5384 unsigned int idx = *ctx->task_reduction_map->get (c);
5385 tree off;
5386 if (ctx->task_reductions[1 + idx])
5387 off = fold_convert (sizetype,
5388 ctx->task_reductions[1 + idx]);
5389 else
5390 off = task_reduction_read (ilist, tskred_temp, sizetype,
5391 7 + 3 * idx + 1);
5392 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5393 tskred_base, off);
5394 gimple_seq_add_stmt (ilist, g);
5396 x = fold_convert (build_pointer_type (boolean_type_node),
5397 xv);
5398 if (TREE_CONSTANT (v))
5399 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5400 TYPE_SIZE_UNIT (type));
5401 else
5403 tree t;
5404 if (is_omp_target (ctx->stmt))
5405 t = NULL_TREE;
5406 else
5407 t = maybe_lookup_decl (v, ctx);
5408 if (t)
5409 v = t;
5410 else
5411 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5412 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5413 fb_rvalue);
5414 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5415 TREE_TYPE (v), v,
5416 build_int_cst (TREE_TYPE (v), 1));
5417 t = fold_build2_loc (clause_loc, MULT_EXPR,
5418 TREE_TYPE (v), t,
5419 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5420 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5422 cond = create_tmp_var (TREE_TYPE (x));
5423 gimplify_assign (cond, x, ilist);
5424 x = xv;
5426 else if (lower_private_allocate (var, type, allocator,
5427 allocate_ptr, ilist, ctx,
5428 true,
5429 TREE_CONSTANT (v)
5430 ? TYPE_SIZE_UNIT (type)
5431 : sz))
5432 x = allocate_ptr;
5433 else if (TREE_CONSTANT (v))
5435 x = create_tmp_var_raw (type, name);
5436 gimple_add_tmp_var (x);
5437 TREE_ADDRESSABLE (x) = 1;
5438 x = build_fold_addr_expr_loc (clause_loc, x);
5440 else
5442 tree atmp
5443 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5444 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5445 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5448 tree ptype = build_pointer_type (TREE_TYPE (type));
5449 x = fold_convert_loc (clause_loc, ptype, x);
5450 tree y = create_tmp_var (ptype, name);
5451 gimplify_assign (y, x, ilist);
5452 x = y;
5453 tree yb = y;
5455 if (!integer_zerop (bias))
5457 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5458 bias);
5459 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5461 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5462 pointer_sized_int_node, yb, bias);
5463 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5464 yb = create_tmp_var (ptype, name);
5465 gimplify_assign (yb, x, ilist);
5466 x = yb;
5469 d = TREE_OPERAND (d, 0);
5470 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5471 d = TREE_OPERAND (d, 0);
5472 if (TREE_CODE (d) == ADDR_EXPR)
5474 if (orig_var != var)
5476 gcc_assert (is_variable_sized (orig_var));
5477 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5479 gimplify_assign (new_var, x, ilist);
5480 tree new_orig_var = lookup_decl (orig_var, ctx);
5481 tree t = build_fold_indirect_ref (new_var);
5482 DECL_IGNORED_P (new_var) = 0;
5483 TREE_THIS_NOTRAP (t) = 1;
5484 SET_DECL_VALUE_EXPR (new_orig_var, t);
5485 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5487 else
5489 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5490 build_int_cst (ptype, 0));
5491 SET_DECL_VALUE_EXPR (new_var, x);
5492 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5495 else
5497 gcc_assert (orig_var == var);
5498 if (TREE_CODE (d) == INDIRECT_REF)
5500 x = create_tmp_var (ptype, name);
5501 TREE_ADDRESSABLE (x) = 1;
5502 gimplify_assign (x, yb, ilist);
5503 x = build_fold_addr_expr_loc (clause_loc, x);
5505 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5506 gimplify_assign (new_var, x, ilist);
5508 /* GOMP_taskgroup_reduction_register memsets the whole
5509 array to zero. If the initializer is zero, we don't
5510 need to initialize it again, just mark it as ever
5511 used unconditionally, i.e. cond = true. */
5512 if (cond
5513 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5514 && initializer_zerop (omp_reduction_init (c,
5515 TREE_TYPE (type))))
5517 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5518 boolean_true_node);
5519 gimple_seq_add_stmt (ilist, g);
5520 continue;
5522 tree end = create_artificial_label (UNKNOWN_LOCATION);
5523 if (cond)
5525 gimple *g;
5526 if (!is_parallel_ctx (ctx))
5528 tree condv = create_tmp_var (boolean_type_node);
5529 g = gimple_build_assign (condv,
5530 build_simple_mem_ref (cond));
5531 gimple_seq_add_stmt (ilist, g);
5532 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5533 g = gimple_build_cond (NE_EXPR, condv,
5534 boolean_false_node, end, lab1);
5535 gimple_seq_add_stmt (ilist, g);
5536 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5538 g = gimple_build_assign (build_simple_mem_ref (cond),
5539 boolean_true_node);
5540 gimple_seq_add_stmt (ilist, g);
5543 tree y1 = create_tmp_var (ptype);
5544 gimplify_assign (y1, y, ilist);
5545 tree i2 = NULL_TREE, y2 = NULL_TREE;
5546 tree body2 = NULL_TREE, end2 = NULL_TREE;
5547 tree y3 = NULL_TREE, y4 = NULL_TREE;
5548 if (task_reduction_needs_orig_p)
5550 y3 = create_tmp_var (ptype);
5551 tree ref;
5552 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5553 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5554 size_int (task_reduction_cnt_full
5555 + task_reduction_cntorig - 1),
5556 NULL_TREE, NULL_TREE);
5557 else
5559 unsigned int idx = *ctx->task_reduction_map->get (c);
5560 ref = task_reduction_read (ilist, tskred_temp, ptype,
5561 7 + 3 * idx);
5563 gimplify_assign (y3, ref, ilist);
5565 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5567 if (pass != 3)
5569 y2 = create_tmp_var (ptype);
5570 gimplify_assign (y2, y, ilist);
5572 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5574 tree ref = build_outer_var_ref (var, ctx);
5575 /* For ref build_outer_var_ref already performs this. */
5576 if (TREE_CODE (d) == INDIRECT_REF)
5577 gcc_assert (omp_privatize_by_reference (var));
5578 else if (TREE_CODE (d) == ADDR_EXPR)
5579 ref = build_fold_addr_expr (ref);
5580 else if (omp_privatize_by_reference (var))
5581 ref = build_fold_addr_expr (ref);
5582 ref = fold_convert_loc (clause_loc, ptype, ref);
5583 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5584 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5586 y3 = create_tmp_var (ptype);
5587 gimplify_assign (y3, unshare_expr (ref), ilist);
5589 if (is_simd)
5591 y4 = create_tmp_var (ptype);
5592 gimplify_assign (y4, ref, dlist);
5596 tree i = create_tmp_var (TREE_TYPE (v));
5597 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5598 tree body = create_artificial_label (UNKNOWN_LOCATION);
5599 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5600 if (y2)
5602 i2 = create_tmp_var (TREE_TYPE (v));
5603 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5604 body2 = create_artificial_label (UNKNOWN_LOCATION);
5605 end2 = create_artificial_label (UNKNOWN_LOCATION);
5606 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5608 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5610 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5611 tree decl_placeholder
5612 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5613 SET_DECL_VALUE_EXPR (decl_placeholder,
5614 build_simple_mem_ref (y1));
5615 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5616 SET_DECL_VALUE_EXPR (placeholder,
5617 y3 ? build_simple_mem_ref (y3)
5618 : error_mark_node);
5619 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5620 x = lang_hooks.decls.omp_clause_default_ctor
5621 (c, build_simple_mem_ref (y1),
5622 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5623 if (x)
5624 gimplify_and_add (x, ilist);
5625 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5627 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5628 lower_omp (&tseq, ctx);
5629 gimple_seq_add_seq (ilist, tseq);
5631 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5632 if (is_simd)
5634 SET_DECL_VALUE_EXPR (decl_placeholder,
5635 build_simple_mem_ref (y2));
5636 SET_DECL_VALUE_EXPR (placeholder,
5637 build_simple_mem_ref (y4));
5638 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5639 lower_omp (&tseq, ctx);
5640 gimple_seq_add_seq (dlist, tseq);
5641 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5643 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5644 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5645 if (y2)
5647 x = lang_hooks.decls.omp_clause_dtor
5648 (c, build_simple_mem_ref (y2));
5649 if (x)
5650 gimplify_and_add (x, dlist);
5653 else
5655 x = omp_reduction_init (c, TREE_TYPE (type));
5656 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5658 /* reduction(-:var) sums up the partial results, so it
5659 acts identically to reduction(+:var). */
5660 if (code == MINUS_EXPR)
5661 code = PLUS_EXPR;
5663 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5664 if (is_simd)
5666 x = build2 (code, TREE_TYPE (type),
5667 build_simple_mem_ref (y4),
5668 build_simple_mem_ref (y2));
5669 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5672 gimple *g
5673 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5674 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5675 gimple_seq_add_stmt (ilist, g);
5676 if (y3)
5678 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5679 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5680 gimple_seq_add_stmt (ilist, g);
5682 g = gimple_build_assign (i, PLUS_EXPR, i,
5683 build_int_cst (TREE_TYPE (i), 1));
5684 gimple_seq_add_stmt (ilist, g);
5685 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5686 gimple_seq_add_stmt (ilist, g);
5687 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5688 if (y2)
5690 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5691 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5692 gimple_seq_add_stmt (dlist, g);
5693 if (y4)
5695 g = gimple_build_assign
5696 (y4, POINTER_PLUS_EXPR, y4,
5697 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5698 gimple_seq_add_stmt (dlist, g);
5700 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5701 build_int_cst (TREE_TYPE (i2), 1));
5702 gimple_seq_add_stmt (dlist, g);
5703 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5704 gimple_seq_add_stmt (dlist, g);
5705 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5707 if (allocator)
5709 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5710 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5711 gimple_seq_add_stmt (dlist, g);
5713 continue;
5715 else if (pass == 2)
5717 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5718 if (is_global_var (out))
5719 x = var;
5720 else if (is_omp_target (ctx->stmt))
5721 x = out;
5722 else
5724 bool by_ref = use_pointer_for_field (var, ctx);
5725 x = build_receiver_ref (var, by_ref, ctx);
5727 if (!omp_privatize_by_reference (var))
5728 x = build_fold_addr_expr (x);
5729 x = fold_convert (ptr_type_node, x);
5730 unsigned cnt = task_reduction_cnt - 1;
5731 if (!task_reduction_needs_orig_p)
5732 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5733 else
5734 cnt = task_reduction_cntorig - 1;
5735 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5736 size_int (cnt), NULL_TREE, NULL_TREE);
5737 gimplify_assign (r, x, ilist);
5738 continue;
5740 else if (pass == 3)
5742 tree type = TREE_TYPE (new_var);
5743 if (!omp_privatize_by_reference (var))
5744 type = build_pointer_type (type);
5745 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5747 unsigned cnt = task_reduction_cnt - 1;
5748 if (!task_reduction_needs_orig_p)
5749 cnt += (task_reduction_cntorig_full
5750 - task_reduction_cntorig);
5751 else
5752 cnt = task_reduction_cntorig - 1;
5753 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5754 size_int (cnt), NULL_TREE, NULL_TREE);
5756 else
5758 unsigned int idx = *ctx->task_reduction_map->get (c);
5759 tree off;
5760 if (ctx->task_reductions[1 + idx])
5761 off = fold_convert (sizetype,
5762 ctx->task_reductions[1 + idx]);
5763 else
5764 off = task_reduction_read (ilist, tskred_temp, sizetype,
5765 7 + 3 * idx + 1);
5766 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5767 tskred_base, off);
5769 x = fold_convert (type, x);
5770 tree t;
5771 if (omp_privatize_by_reference (var))
5773 gimplify_assign (new_var, x, ilist);
5774 t = new_var;
5775 new_var = build_simple_mem_ref (new_var);
5777 else
5779 t = create_tmp_var (type);
5780 gimplify_assign (t, x, ilist);
5781 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5782 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5784 t = fold_convert (build_pointer_type (boolean_type_node), t);
5785 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5786 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5787 cond = create_tmp_var (TREE_TYPE (t));
5788 gimplify_assign (cond, t, ilist);
5790 else if (is_variable_sized (var))
5792 /* For variable sized types, we need to allocate the
5793 actual storage here. Call alloca and store the
5794 result in the pointer decl that we created elsewhere. */
5795 if (pass == 0)
5796 continue;
5798 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5800 tree tmp;
5802 ptr = DECL_VALUE_EXPR (new_var);
5803 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5804 ptr = TREE_OPERAND (ptr, 0);
5805 gcc_assert (DECL_P (ptr));
5806 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5808 if (lower_private_allocate (var, new_var, allocator,
5809 allocate_ptr, ilist, ctx,
5810 false, x))
5811 tmp = allocate_ptr;
5812 else
5814 /* void *tmp = __builtin_alloca */
5815 tree atmp
5816 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5817 gcall *stmt
5818 = gimple_build_call (atmp, 2, x,
5819 size_int (DECL_ALIGN (var)));
5820 cfun->calls_alloca = 1;
5821 tmp = create_tmp_var_raw (ptr_type_node);
5822 gimple_add_tmp_var (tmp);
5823 gimple_call_set_lhs (stmt, tmp);
5825 gimple_seq_add_stmt (ilist, stmt);
5828 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5829 gimplify_assign (ptr, x, ilist);
5832 else if (omp_privatize_by_reference (var)
5833 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5834 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5836 /* For references that are being privatized for Fortran,
5837 allocate new backing storage for the new pointer
5838 variable. This allows us to avoid changing all the
5839 code that expects a pointer to something that expects
5840 a direct variable. */
5841 if (pass == 0)
5842 continue;
5844 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5845 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5847 x = build_receiver_ref (var, false, ctx);
5848 if (ctx->allocate_map)
5849 if (tree *allocatep = ctx->allocate_map->get (var))
5851 allocator = *allocatep;
5852 if (TREE_CODE (allocator) == TREE_LIST)
5853 allocator = TREE_PURPOSE (allocator);
5854 if (TREE_CODE (allocator) != INTEGER_CST)
5855 allocator = build_outer_var_ref (allocator, ctx);
5856 allocator = fold_convert (pointer_sized_int_node,
5857 allocator);
5858 allocate_ptr = unshare_expr (x);
5860 if (allocator == NULL_TREE)
5861 x = build_fold_addr_expr_loc (clause_loc, x);
5863 else if (lower_private_allocate (var, new_var, allocator,
5864 allocate_ptr,
5865 ilist, ctx, true, x))
5866 x = allocate_ptr;
5867 else if (TREE_CONSTANT (x))
5869 /* For reduction in SIMD loop, defer adding the
5870 initialization of the reference, because if we decide
5871 to use SIMD array for it, the initilization could cause
5872 expansion ICE. Ditto for other privatization clauses. */
5873 if (is_simd)
5874 x = NULL_TREE;
5875 else
5877 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5878 get_name (var));
5879 gimple_add_tmp_var (x);
5880 TREE_ADDRESSABLE (x) = 1;
5881 x = build_fold_addr_expr_loc (clause_loc, x);
5884 else
5886 tree atmp
5887 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5888 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5889 tree al = size_int (TYPE_ALIGN (rtype));
5890 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5893 if (x)
5895 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5896 gimplify_assign (new_var, x, ilist);
5899 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5901 else if ((c_kind == OMP_CLAUSE_REDUCTION
5902 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5903 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5905 if (pass == 0)
5906 continue;
5908 else if (pass != 0)
5909 continue;
5911 switch (OMP_CLAUSE_CODE (c))
5913 case OMP_CLAUSE_SHARED:
5914 /* Ignore shared directives in teams construct inside
5915 target construct. */
5916 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5917 && !is_host_teams_ctx (ctx))
5918 continue;
5919 /* Shared global vars are just accessed directly. */
5920 if (is_global_var (new_var))
5921 break;
5922 /* For taskloop firstprivate/lastprivate, represented
5923 as firstprivate and shared clause on the task, new_var
5924 is the firstprivate var. */
5925 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5926 break;
5927 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5928 needs to be delayed until after fixup_child_record_type so
5929 that we get the correct type during the dereference. */
5930 by_ref = use_pointer_for_field (var, ctx);
5931 x = build_receiver_ref (var, by_ref, ctx);
5932 SET_DECL_VALUE_EXPR (new_var, x);
5933 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5935 /* ??? If VAR is not passed by reference, and the variable
5936 hasn't been initialized yet, then we'll get a warning for
5937 the store into the omp_data_s structure. Ideally, we'd be
5938 able to notice this and not store anything at all, but
5939 we're generating code too early. Suppress the warning. */
5940 if (!by_ref)
5941 suppress_warning (var, OPT_Wuninitialized);
5942 break;
5944 case OMP_CLAUSE__CONDTEMP_:
5945 if (is_parallel_ctx (ctx))
5947 x = build_receiver_ref (var, false, ctx);
5948 SET_DECL_VALUE_EXPR (new_var, x);
5949 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5951 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5953 x = build_zero_cst (TREE_TYPE (var));
5954 goto do_private;
5956 break;
5958 case OMP_CLAUSE_LASTPRIVATE:
5959 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5960 break;
5961 /* FALLTHRU */
5963 case OMP_CLAUSE_PRIVATE:
5964 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5965 x = build_outer_var_ref (var, ctx);
5966 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5968 if (is_task_ctx (ctx))
5969 x = build_receiver_ref (var, false, ctx);
5970 else
5971 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5973 else
5974 x = NULL;
5975 do_private:
5976 tree nx;
5977 bool copy_ctor;
5978 copy_ctor = false;
5979 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5980 ilist, ctx, false, NULL_TREE);
5981 nx = unshare_expr (new_var);
5982 if (is_simd
5983 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5984 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5985 copy_ctor = true;
5986 if (copy_ctor)
5987 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5988 else
5989 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5990 if (is_simd)
5992 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5993 if ((TREE_ADDRESSABLE (new_var) || nx || y
5994 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5995 && (gimple_omp_for_collapse (ctx->stmt) != 1
5996 || (gimple_omp_for_index (ctx->stmt, 0)
5997 != new_var)))
5998 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5999 || omp_privatize_by_reference (var))
6000 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6001 ivar, lvar))
6003 if (omp_privatize_by_reference (var))
6005 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6006 tree new_vard = TREE_OPERAND (new_var, 0);
6007 gcc_assert (DECL_P (new_vard));
6008 SET_DECL_VALUE_EXPR (new_vard,
6009 build_fold_addr_expr (lvar));
6010 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6013 if (nx)
6015 tree iv = unshare_expr (ivar);
6016 if (copy_ctor)
6017 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
6019 else
6020 x = lang_hooks.decls.omp_clause_default_ctor (c,
6024 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
6026 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
6027 unshare_expr (ivar), x);
6028 nx = x;
6030 if (nx && x)
6031 gimplify_and_add (x, &llist[0]);
6032 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6033 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6035 tree v = new_var;
6036 if (!DECL_P (v))
6038 gcc_assert (TREE_CODE (v) == MEM_REF);
6039 v = TREE_OPERAND (v, 0);
6040 gcc_assert (DECL_P (v));
6042 v = *ctx->lastprivate_conditional_map->get (v);
6043 tree t = create_tmp_var (TREE_TYPE (v));
6044 tree z = build_zero_cst (TREE_TYPE (v));
6045 tree orig_v
6046 = build_outer_var_ref (var, ctx,
6047 OMP_CLAUSE_LASTPRIVATE);
6048 gimple_seq_add_stmt (dlist,
6049 gimple_build_assign (t, z));
6050 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
6051 tree civar = DECL_VALUE_EXPR (v);
6052 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
6053 civar = unshare_expr (civar);
6054 TREE_OPERAND (civar, 1) = sctx.idx;
6055 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
6056 unshare_expr (civar));
6057 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
6058 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
6059 orig_v, unshare_expr (ivar)));
6060 tree cond = build2 (LT_EXPR, boolean_type_node, t,
6061 civar);
6062 x = build3 (COND_EXPR, void_type_node, cond, x,
6063 void_node);
6064 gimple_seq tseq = NULL;
6065 gimplify_and_add (x, &tseq);
6066 if (ctx->outer)
6067 lower_omp (&tseq, ctx->outer);
6068 gimple_seq_add_seq (&llist[1], tseq);
6070 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6071 && ctx->for_simd_scan_phase)
6073 x = unshare_expr (ivar);
6074 tree orig_v
6075 = build_outer_var_ref (var, ctx,
6076 OMP_CLAUSE_LASTPRIVATE);
6077 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6078 orig_v);
6079 gimplify_and_add (x, &llist[0]);
6081 if (y)
6083 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
6084 if (y)
6085 gimplify_and_add (y, &llist[1]);
6087 break;
6089 if (omp_privatize_by_reference (var))
6091 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6092 tree new_vard = TREE_OPERAND (new_var, 0);
6093 gcc_assert (DECL_P (new_vard));
6094 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6095 x = TYPE_SIZE_UNIT (type);
6096 if (TREE_CONSTANT (x))
6098 x = create_tmp_var_raw (type, get_name (var));
6099 gimple_add_tmp_var (x);
6100 TREE_ADDRESSABLE (x) = 1;
6101 x = build_fold_addr_expr_loc (clause_loc, x);
6102 x = fold_convert_loc (clause_loc,
6103 TREE_TYPE (new_vard), x);
6104 gimplify_assign (new_vard, x, ilist);
6108 if (nx)
6109 gimplify_and_add (nx, ilist);
6110 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6111 && is_simd
6112 && ctx->for_simd_scan_phase)
6114 tree orig_v = build_outer_var_ref (var, ctx,
6115 OMP_CLAUSE_LASTPRIVATE);
6116 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
6117 orig_v);
6118 gimplify_and_add (x, ilist);
6120 /* FALLTHRU */
6122 do_dtor:
6123 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
6124 if (x)
6125 gimplify_and_add (x, dlist);
6126 if (allocator)
6128 if (!is_gimple_val (allocator))
6130 tree avar = create_tmp_var (TREE_TYPE (allocator));
6131 gimplify_assign (avar, allocator, dlist);
6132 allocator = avar;
6134 if (!is_gimple_val (allocate_ptr))
6136 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
6137 gimplify_assign (apvar, allocate_ptr, dlist);
6138 allocate_ptr = apvar;
6140 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
6141 gimple *g
6142 = gimple_build_call (f, 2, allocate_ptr, allocator);
6143 gimple_seq_add_stmt (dlist, g);
6145 break;
6147 case OMP_CLAUSE_LINEAR:
6148 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6149 goto do_firstprivate;
6150 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6151 x = NULL;
6152 else
6153 x = build_outer_var_ref (var, ctx);
6154 goto do_private;
6156 case OMP_CLAUSE_FIRSTPRIVATE:
6157 if (is_task_ctx (ctx))
6159 if ((omp_privatize_by_reference (var)
6160 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
6161 || is_variable_sized (var))
6162 goto do_dtor;
6163 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
6164 ctx))
6165 || use_pointer_for_field (var, NULL))
6167 x = build_receiver_ref (var, false, ctx);
6168 if (ctx->allocate_map)
6169 if (tree *allocatep = ctx->allocate_map->get (var))
6171 allocator = *allocatep;
6172 if (TREE_CODE (allocator) == TREE_LIST)
6173 allocator = TREE_PURPOSE (allocator);
6174 if (TREE_CODE (allocator) != INTEGER_CST)
6175 allocator = build_outer_var_ref (allocator, ctx);
6176 allocator = fold_convert (pointer_sized_int_node,
6177 allocator);
6178 allocate_ptr = unshare_expr (x);
6179 x = build_simple_mem_ref (x);
6180 TREE_THIS_NOTRAP (x) = 1;
6182 SET_DECL_VALUE_EXPR (new_var, x);
6183 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6184 goto do_dtor;
6187 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6188 && omp_privatize_by_reference (var))
6190 x = build_outer_var_ref (var, ctx);
6191 gcc_assert (TREE_CODE (x) == MEM_REF
6192 && integer_zerop (TREE_OPERAND (x, 1)));
6193 x = TREE_OPERAND (x, 0);
6194 x = lang_hooks.decls.omp_clause_copy_ctor
6195 (c, unshare_expr (new_var), x);
6196 gimplify_and_add (x, ilist);
6197 goto do_dtor;
6199 do_firstprivate:
6200 lower_private_allocate (var, new_var, allocator, allocate_ptr,
6201 ilist, ctx, false, NULL_TREE);
6202 x = build_outer_var_ref (var, ctx);
6203 if (is_simd)
6205 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6206 && gimple_omp_for_combined_into_p (ctx->stmt))
6208 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6209 if (DECL_P (t))
6210 t = build_outer_var_ref (t, ctx);
6211 tree stept = TREE_TYPE (t);
6212 tree ct = omp_find_clause (clauses,
6213 OMP_CLAUSE__LOOPTEMP_);
6214 gcc_assert (ct);
6215 tree l = OMP_CLAUSE_DECL (ct);
6216 tree n1 = fd->loop.n1;
6217 tree step = fd->loop.step;
6218 tree itype = TREE_TYPE (l);
6219 if (POINTER_TYPE_P (itype))
6220 itype = signed_type_for (itype);
6221 l = fold_build2 (MINUS_EXPR, itype, l, n1);
6222 if (TYPE_UNSIGNED (itype)
6223 && fd->loop.cond_code == GT_EXPR)
6224 l = fold_build2 (TRUNC_DIV_EXPR, itype,
6225 fold_build1 (NEGATE_EXPR, itype, l),
6226 fold_build1 (NEGATE_EXPR,
6227 itype, step));
6228 else
6229 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6230 t = fold_build2 (MULT_EXPR, stept,
6231 fold_convert (stept, l), t);
6233 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6235 if (omp_privatize_by_reference (var))
6237 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6238 tree new_vard = TREE_OPERAND (new_var, 0);
6239 gcc_assert (DECL_P (new_vard));
6240 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6241 nx = TYPE_SIZE_UNIT (type);
6242 if (TREE_CONSTANT (nx))
6244 nx = create_tmp_var_raw (type,
6245 get_name (var));
6246 gimple_add_tmp_var (nx);
6247 TREE_ADDRESSABLE (nx) = 1;
6248 nx = build_fold_addr_expr_loc (clause_loc,
6249 nx);
6250 nx = fold_convert_loc (clause_loc,
6251 TREE_TYPE (new_vard),
6252 nx);
6253 gimplify_assign (new_vard, nx, ilist);
6257 x = lang_hooks.decls.omp_clause_linear_ctor
6258 (c, new_var, x, t);
6259 gimplify_and_add (x, ilist);
6260 goto do_dtor;
6263 if (POINTER_TYPE_P (TREE_TYPE (x)))
6264 x = fold_build_pointer_plus (x, t);
6265 else
6266 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x,
6267 fold_convert (TREE_TYPE (x), t));
6270 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6271 || TREE_ADDRESSABLE (new_var)
6272 || omp_privatize_by_reference (var))
6273 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6274 ivar, lvar))
6276 if (omp_privatize_by_reference (var))
6278 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6279 tree new_vard = TREE_OPERAND (new_var, 0);
6280 gcc_assert (DECL_P (new_vard));
6281 SET_DECL_VALUE_EXPR (new_vard,
6282 build_fold_addr_expr (lvar));
6283 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6285 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6287 tree iv = create_tmp_var (TREE_TYPE (new_var));
6288 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6289 gimplify_and_add (x, ilist);
6290 gimple_stmt_iterator gsi
6291 = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6292 gassign *g
6293 = gimple_build_assign (unshare_expr (lvar), iv);
6294 gsi_insert_before_without_update (&gsi, g,
6295 GSI_SAME_STMT);
6296 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6297 enum tree_code code = PLUS_EXPR;
6298 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6299 code = POINTER_PLUS_EXPR;
6300 g = gimple_build_assign (iv, code, iv, t);
6301 gsi_insert_before_without_update (&gsi, g,
6302 GSI_SAME_STMT);
6303 break;
6305 x = lang_hooks.decls.omp_clause_copy_ctor
6306 (c, unshare_expr (ivar), x);
6307 gimplify_and_add (x, &llist[0]);
6308 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6309 if (x)
6310 gimplify_and_add (x, &llist[1]);
6311 break;
6313 if (omp_privatize_by_reference (var))
6315 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6316 tree new_vard = TREE_OPERAND (new_var, 0);
6317 gcc_assert (DECL_P (new_vard));
6318 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6319 nx = TYPE_SIZE_UNIT (type);
6320 if (TREE_CONSTANT (nx))
6322 nx = create_tmp_var_raw (type, get_name (var));
6323 gimple_add_tmp_var (nx);
6324 TREE_ADDRESSABLE (nx) = 1;
6325 nx = build_fold_addr_expr_loc (clause_loc, nx);
6326 nx = fold_convert_loc (clause_loc,
6327 TREE_TYPE (new_vard), nx);
6328 gimplify_assign (new_vard, nx, ilist);
6332 x = lang_hooks.decls.omp_clause_copy_ctor
6333 (c, unshare_expr (new_var), x);
6334 gimplify_and_add (x, ilist);
6335 goto do_dtor;
6337 case OMP_CLAUSE__LOOPTEMP_:
6338 case OMP_CLAUSE__REDUCTEMP_:
6339 gcc_assert (is_taskreg_ctx (ctx));
6340 x = build_outer_var_ref (var, ctx);
6341 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6342 gimplify_and_add (x, ilist);
6343 break;
6345 case OMP_CLAUSE_COPYIN:
6346 by_ref = use_pointer_for_field (var, NULL);
6347 x = build_receiver_ref (var, by_ref, ctx);
6348 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6349 append_to_statement_list (x, &copyin_seq);
6350 copyin_by_ref |= by_ref;
6351 break;
6353 case OMP_CLAUSE_REDUCTION:
6354 case OMP_CLAUSE_IN_REDUCTION:
6355 /* OpenACC reductions are initialized using the
6356 GOACC_REDUCTION internal function. */
6357 if (is_gimple_omp_oacc (ctx->stmt))
6358 break;
6359 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6361 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6362 gimple *tseq;
6363 tree ptype = TREE_TYPE (placeholder);
6364 if (cond)
6366 x = error_mark_node;
6367 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6368 && !task_reduction_needs_orig_p)
6369 x = var;
6370 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6372 tree pptype = build_pointer_type (ptype);
6373 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6374 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6375 size_int (task_reduction_cnt_full
6376 + task_reduction_cntorig - 1),
6377 NULL_TREE, NULL_TREE);
6378 else
6380 unsigned int idx
6381 = *ctx->task_reduction_map->get (c);
6382 x = task_reduction_read (ilist, tskred_temp,
6383 pptype, 7 + 3 * idx);
6385 x = fold_convert (pptype, x);
6386 x = build_simple_mem_ref (x);
6389 else
6391 lower_private_allocate (var, new_var, allocator,
6392 allocate_ptr, ilist, ctx, false,
6393 NULL_TREE);
6394 x = build_outer_var_ref (var, ctx);
6396 if (omp_privatize_by_reference (var)
6397 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6398 x = build_fold_addr_expr_loc (clause_loc, x);
6400 SET_DECL_VALUE_EXPR (placeholder, x);
6401 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6402 tree new_vard = new_var;
6403 if (omp_privatize_by_reference (var))
6405 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6406 new_vard = TREE_OPERAND (new_var, 0);
6407 gcc_assert (DECL_P (new_vard));
6409 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6410 if (is_simd
6411 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6412 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6413 rvarp = &rvar;
6414 if (is_simd
6415 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6416 ivar, lvar, rvarp,
6417 &rvar2))
6419 if (new_vard == new_var)
6421 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6422 SET_DECL_VALUE_EXPR (new_var, ivar);
6424 else
6426 SET_DECL_VALUE_EXPR (new_vard,
6427 build_fold_addr_expr (ivar));
6428 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6430 x = lang_hooks.decls.omp_clause_default_ctor
6431 (c, unshare_expr (ivar),
6432 build_outer_var_ref (var, ctx));
6433 if (rvarp && ctx->for_simd_scan_phase)
6435 if (x)
6436 gimplify_and_add (x, &llist[0]);
6437 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6438 if (x)
6439 gimplify_and_add (x, &llist[1]);
6440 break;
6442 else if (rvarp)
6444 if (x)
6446 gimplify_and_add (x, &llist[0]);
6448 tree ivar2 = unshare_expr (lvar);
6449 TREE_OPERAND (ivar2, 1) = sctx.idx;
6450 x = lang_hooks.decls.omp_clause_default_ctor
6451 (c, ivar2, build_outer_var_ref (var, ctx));
6452 gimplify_and_add (x, &llist[0]);
6454 if (rvar2)
6456 x = lang_hooks.decls.omp_clause_default_ctor
6457 (c, unshare_expr (rvar2),
6458 build_outer_var_ref (var, ctx));
6459 gimplify_and_add (x, &llist[0]);
6462 /* For types that need construction, add another
6463 private var which will be default constructed
6464 and optionally initialized with
6465 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6466 loop we want to assign this value instead of
6467 constructing and destructing it in each
6468 iteration. */
6469 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6470 gimple_add_tmp_var (nv);
6471 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6472 ? rvar2
6473 : ivar, 0),
6474 nv);
6475 x = lang_hooks.decls.omp_clause_default_ctor
6476 (c, nv, build_outer_var_ref (var, ctx));
6477 gimplify_and_add (x, ilist);
6479 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6481 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6482 x = DECL_VALUE_EXPR (new_vard);
6483 tree vexpr = nv;
6484 if (new_vard != new_var)
6485 vexpr = build_fold_addr_expr (nv);
6486 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6487 lower_omp (&tseq, ctx);
6488 SET_DECL_VALUE_EXPR (new_vard, x);
6489 gimple_seq_add_seq (ilist, tseq);
6490 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6493 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6494 if (x)
6495 gimplify_and_add (x, dlist);
6498 tree ref = build_outer_var_ref (var, ctx);
6499 x = unshare_expr (ivar);
6500 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6501 ref);
6502 gimplify_and_add (x, &llist[0]);
6504 ref = build_outer_var_ref (var, ctx);
6505 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6506 rvar);
6507 gimplify_and_add (x, &llist[3]);
6509 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6510 if (new_vard == new_var)
6511 SET_DECL_VALUE_EXPR (new_var, lvar);
6512 else
6513 SET_DECL_VALUE_EXPR (new_vard,
6514 build_fold_addr_expr (lvar));
6516 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6517 if (x)
6518 gimplify_and_add (x, &llist[1]);
6520 tree ivar2 = unshare_expr (lvar);
6521 TREE_OPERAND (ivar2, 1) = sctx.idx;
6522 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6523 if (x)
6524 gimplify_and_add (x, &llist[1]);
6526 if (rvar2)
6528 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6529 if (x)
6530 gimplify_and_add (x, &llist[1]);
6532 break;
6534 if (x)
6535 gimplify_and_add (x, &llist[0]);
6536 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6538 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6539 lower_omp (&tseq, ctx);
6540 gimple_seq_add_seq (&llist[0], tseq);
6542 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6543 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6544 lower_omp (&tseq, ctx);
6545 gimple_seq_add_seq (&llist[1], tseq);
6546 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6547 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6548 if (new_vard == new_var)
6549 SET_DECL_VALUE_EXPR (new_var, lvar);
6550 else
6551 SET_DECL_VALUE_EXPR (new_vard,
6552 build_fold_addr_expr (lvar));
6553 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6554 if (x)
6555 gimplify_and_add (x, &llist[1]);
6556 break;
6558 /* If this is a reference to constant size reduction var
6559 with placeholder, we haven't emitted the initializer
6560 for it because it is undesirable if SIMD arrays are used.
6561 But if they aren't used, we need to emit the deferred
6562 initialization now. */
6563 else if (omp_privatize_by_reference (var) && is_simd)
6564 handle_simd_reference (clause_loc, new_vard, ilist);
6566 tree lab2 = NULL_TREE;
6567 if (cond)
6569 gimple *g;
6570 if (!is_parallel_ctx (ctx))
6572 tree condv = create_tmp_var (boolean_type_node);
6573 tree m = build_simple_mem_ref (cond);
6574 g = gimple_build_assign (condv, m);
6575 gimple_seq_add_stmt (ilist, g);
6576 tree lab1
6577 = create_artificial_label (UNKNOWN_LOCATION);
6578 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6579 g = gimple_build_cond (NE_EXPR, condv,
6580 boolean_false_node,
6581 lab2, lab1);
6582 gimple_seq_add_stmt (ilist, g);
6583 gimple_seq_add_stmt (ilist,
6584 gimple_build_label (lab1));
6586 g = gimple_build_assign (build_simple_mem_ref (cond),
6587 boolean_true_node);
6588 gimple_seq_add_stmt (ilist, g);
6590 x = lang_hooks.decls.omp_clause_default_ctor
6591 (c, unshare_expr (new_var),
6592 cond ? NULL_TREE
6593 : build_outer_var_ref (var, ctx));
6594 if (x)
6595 gimplify_and_add (x, ilist);
6597 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6598 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6600 if (ctx->for_simd_scan_phase)
6601 goto do_dtor;
6602 if (x || (!is_simd
6603 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6605 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6606 gimple_add_tmp_var (nv);
6607 ctx->cb.decl_map->put (new_vard, nv);
6608 x = lang_hooks.decls.omp_clause_default_ctor
6609 (c, nv, build_outer_var_ref (var, ctx));
6610 if (x)
6611 gimplify_and_add (x, ilist);
6612 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6614 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6615 tree vexpr = nv;
6616 if (new_vard != new_var)
6617 vexpr = build_fold_addr_expr (nv);
6618 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6619 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6620 lower_omp (&tseq, ctx);
6621 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6622 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6623 gimple_seq_add_seq (ilist, tseq);
6625 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6626 if (is_simd && ctx->scan_exclusive)
6628 tree nv2
6629 = create_tmp_var_raw (TREE_TYPE (new_var));
6630 gimple_add_tmp_var (nv2);
6631 ctx->cb.decl_map->put (nv, nv2);
6632 x = lang_hooks.decls.omp_clause_default_ctor
6633 (c, nv2, build_outer_var_ref (var, ctx));
6634 gimplify_and_add (x, ilist);
6635 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6636 if (x)
6637 gimplify_and_add (x, dlist);
6639 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6640 if (x)
6641 gimplify_and_add (x, dlist);
6643 else if (is_simd
6644 && ctx->scan_exclusive
6645 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6647 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6648 gimple_add_tmp_var (nv2);
6649 ctx->cb.decl_map->put (new_vard, nv2);
6650 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6651 if (x)
6652 gimplify_and_add (x, dlist);
6654 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6655 goto do_dtor;
6658 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6660 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6661 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6662 && is_omp_target (ctx->stmt))
6664 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6665 tree oldv = NULL_TREE;
6666 gcc_assert (d);
6667 if (DECL_HAS_VALUE_EXPR_P (d))
6668 oldv = DECL_VALUE_EXPR (d);
6669 SET_DECL_VALUE_EXPR (d, new_vard);
6670 DECL_HAS_VALUE_EXPR_P (d) = 1;
6671 lower_omp (&tseq, ctx);
6672 if (oldv)
6673 SET_DECL_VALUE_EXPR (d, oldv);
6674 else
6676 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6677 DECL_HAS_VALUE_EXPR_P (d) = 0;
6680 else
6681 lower_omp (&tseq, ctx);
6682 gimple_seq_add_seq (ilist, tseq);
6684 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6685 if (is_simd)
6687 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6688 lower_omp (&tseq, ctx);
6689 gimple_seq_add_seq (dlist, tseq);
6690 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6692 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6693 if (cond)
6695 if (lab2)
6696 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6697 break;
6699 goto do_dtor;
6701 else
6703 x = omp_reduction_init (c, TREE_TYPE (new_var));
6704 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6705 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6707 if (cond)
6709 gimple *g;
6710 tree lab2 = NULL_TREE;
6711 /* GOMP_taskgroup_reduction_register memsets the whole
6712 array to zero. If the initializer is zero, we don't
6713 need to initialize it again, just mark it as ever
6714 used unconditionally, i.e. cond = true. */
6715 if (initializer_zerop (x))
6717 g = gimple_build_assign (build_simple_mem_ref (cond),
6718 boolean_true_node);
6719 gimple_seq_add_stmt (ilist, g);
6720 break;
6723 /* Otherwise, emit
6724 if (!cond) { cond = true; new_var = x; } */
6725 if (!is_parallel_ctx (ctx))
6727 tree condv = create_tmp_var (boolean_type_node);
6728 tree m = build_simple_mem_ref (cond);
6729 g = gimple_build_assign (condv, m);
6730 gimple_seq_add_stmt (ilist, g);
6731 tree lab1
6732 = create_artificial_label (UNKNOWN_LOCATION);
6733 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6734 g = gimple_build_cond (NE_EXPR, condv,
6735 boolean_false_node,
6736 lab2, lab1);
6737 gimple_seq_add_stmt (ilist, g);
6738 gimple_seq_add_stmt (ilist,
6739 gimple_build_label (lab1));
6741 g = gimple_build_assign (build_simple_mem_ref (cond),
6742 boolean_true_node);
6743 gimple_seq_add_stmt (ilist, g);
6744 gimplify_assign (new_var, x, ilist);
6745 if (lab2)
6746 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6747 break;
6750 /* reduction(-:var) sums up the partial results, so it
6751 acts identically to reduction(+:var). */
6752 if (code == MINUS_EXPR)
6753 code = PLUS_EXPR;
6755 bool is_truth_op
6756 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6757 tree new_vard = new_var;
6758 if (is_simd && omp_privatize_by_reference (var))
6760 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6761 new_vard = TREE_OPERAND (new_var, 0);
6762 gcc_assert (DECL_P (new_vard));
6764 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6765 if (is_simd
6766 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6767 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6768 rvarp = &rvar;
6769 if (is_simd
6770 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6771 ivar, lvar, rvarp,
6772 &rvar2))
6774 if (new_vard != new_var)
6776 SET_DECL_VALUE_EXPR (new_vard,
6777 build_fold_addr_expr (lvar));
6778 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6781 tree ref = build_outer_var_ref (var, ctx);
6783 if (rvarp)
6785 if (ctx->for_simd_scan_phase)
6786 break;
6787 gimplify_assign (ivar, ref, &llist[0]);
6788 ref = build_outer_var_ref (var, ctx);
6789 gimplify_assign (ref, rvar, &llist[3]);
6790 break;
6793 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6795 if (sctx.is_simt)
6797 if (!simt_lane)
6798 simt_lane = create_tmp_var (unsigned_type_node);
6799 x = build_call_expr_internal_loc
6800 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6801 TREE_TYPE (ivar), 2, ivar, simt_lane);
6802 /* Make sure x is evaluated unconditionally. */
6803 tree bfly_var = create_tmp_var (TREE_TYPE (ivar));
6804 gimplify_assign (bfly_var, x, &llist[2]);
6805 x = build2 (code, TREE_TYPE (ivar), ivar, bfly_var);
6806 gimplify_assign (ivar, x, &llist[2]);
6808 tree ivar2 = ivar;
6809 tree ref2 = ref;
6810 if (is_truth_op)
6812 tree zero = build_zero_cst (TREE_TYPE (ivar));
6813 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6814 boolean_type_node, ivar,
6815 zero);
6816 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6817 boolean_type_node, ref,
6818 zero);
6820 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6821 if (is_truth_op)
6822 x = fold_convert (TREE_TYPE (ref), x);
6823 ref = build_outer_var_ref (var, ctx);
6824 gimplify_assign (ref, x, &llist[1]);
6827 else
6829 lower_private_allocate (var, new_var, allocator,
6830 allocate_ptr, ilist, ctx,
6831 false, NULL_TREE);
6832 if (omp_privatize_by_reference (var) && is_simd)
6833 handle_simd_reference (clause_loc, new_vard, ilist);
6834 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6835 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6836 break;
6837 gimplify_assign (new_var, x, ilist);
6838 if (is_simd)
6840 tree ref = build_outer_var_ref (var, ctx);
6841 tree new_var2 = new_var;
6842 tree ref2 = ref;
6843 if (is_truth_op)
6845 tree zero = build_zero_cst (TREE_TYPE (new_var));
6846 new_var2
6847 = fold_build2_loc (clause_loc, NE_EXPR,
6848 boolean_type_node, new_var,
6849 zero);
6850 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6851 boolean_type_node, ref,
6852 zero);
6854 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6855 if (is_truth_op)
6856 x = fold_convert (TREE_TYPE (new_var), x);
6857 ref = build_outer_var_ref (var, ctx);
6858 gimplify_assign (ref, x, dlist);
6860 if (allocator)
6861 goto do_dtor;
6864 break;
6866 default:
6867 gcc_unreachable ();
6871 if (tskred_avar)
6873 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6874 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6877 if (known_eq (sctx.max_vf, 1U))
6879 sctx.is_simt = false;
6880 if (ctx->lastprivate_conditional_map)
6882 if (gimple_omp_for_combined_into_p (ctx->stmt))
6884 /* Signal to lower_omp_1 that it should use parent context. */
6885 ctx->combined_into_simd_safelen1 = true;
6886 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6887 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6888 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6890 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6891 omp_context *outer = ctx->outer;
6892 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6893 outer = outer->outer;
6894 tree *v = ctx->lastprivate_conditional_map->get (o);
6895 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6896 tree *pv = outer->lastprivate_conditional_map->get (po);
6897 *v = *pv;
6900 else
6902 /* When not vectorized, treat lastprivate(conditional:) like
6903 normal lastprivate, as there will be just one simd lane
6904 writing the privatized variable. */
6905 delete ctx->lastprivate_conditional_map;
6906 ctx->lastprivate_conditional_map = NULL;
6911 if (nonconst_simd_if)
6913 if (sctx.lane == NULL_TREE)
6915 sctx.idx = create_tmp_var (unsigned_type_node);
6916 sctx.lane = create_tmp_var (unsigned_type_node);
6918 /* FIXME: For now. */
6919 sctx.is_simt = false;
6922 if (sctx.lane || sctx.is_simt)
6924 uid = create_tmp_var (ptr_type_node, "simduid");
6925 /* Don't want uninit warnings on simduid, it is always uninitialized,
6926 but we use it not for the value, but for the DECL_UID only. */
6927 suppress_warning (uid, OPT_Wuninitialized);
6928 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6929 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6930 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6931 gimple_omp_for_set_clauses (ctx->stmt, c);
6933 /* Emit calls denoting privatized variables and initializing a pointer to
6934 structure that holds private variables as fields after ompdevlow pass. */
6935 if (sctx.is_simt)
6937 sctx.simt_eargs[0] = uid;
6938 gimple *g
6939 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6940 gimple_call_set_lhs (g, uid);
6941 gimple_seq_add_stmt (ilist, g);
6942 sctx.simt_eargs.release ();
6944 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6945 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6946 gimple_call_set_lhs (g, simtrec);
6947 gimple_seq_add_stmt (ilist, g);
6949 if (sctx.lane)
6951 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6952 2 + (nonconst_simd_if != NULL),
6953 uid, integer_zero_node,
6954 nonconst_simd_if);
6955 gimple_call_set_lhs (g, sctx.lane);
6956 gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6957 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6958 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6959 build_int_cst (unsigned_type_node, 0));
6960 gimple_seq_add_stmt (ilist, g);
6961 if (sctx.lastlane)
6963 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6964 2, uid, sctx.lane);
6965 gimple_call_set_lhs (g, sctx.lastlane);
6966 gimple_seq_add_stmt (dlist, g);
6967 gimple_seq_add_seq (dlist, llist[3]);
6969 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6970 if (llist[2])
6972 tree simt_vf = create_tmp_var (unsigned_type_node);
6973 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6974 gimple_call_set_lhs (g, simt_vf);
6975 gimple_seq_add_stmt (dlist, g);
6977 tree t = build_int_cst (unsigned_type_node, 1);
6978 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6979 gimple_seq_add_stmt (dlist, g);
6981 t = build_int_cst (unsigned_type_node, 0);
6982 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6983 gimple_seq_add_stmt (dlist, g);
6985 tree body = create_artificial_label (UNKNOWN_LOCATION);
6986 tree header = create_artificial_label (UNKNOWN_LOCATION);
6987 tree end = create_artificial_label (UNKNOWN_LOCATION);
6988 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6989 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6991 gimple_seq_add_seq (dlist, llist[2]);
6993 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6994 gimple_seq_add_stmt (dlist, g);
6996 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6997 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6998 gimple_seq_add_stmt (dlist, g);
7000 gimple_seq_add_stmt (dlist, gimple_build_label (end));
7002 for (int i = 0; i < 2; i++)
7003 if (llist[i])
7005 tree vf = create_tmp_var (unsigned_type_node);
7006 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
7007 gimple_call_set_lhs (g, vf);
7008 gimple_seq *seq = i == 0 ? ilist : dlist;
7009 gimple_seq_add_stmt (seq, g);
7010 tree t = build_int_cst (unsigned_type_node, 0);
7011 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
7012 gimple_seq_add_stmt (seq, g);
7013 tree body = create_artificial_label (UNKNOWN_LOCATION);
7014 tree header = create_artificial_label (UNKNOWN_LOCATION);
7015 tree end = create_artificial_label (UNKNOWN_LOCATION);
7016 gimple_seq_add_stmt (seq, gimple_build_goto (header));
7017 gimple_seq_add_stmt (seq, gimple_build_label (body));
7018 gimple_seq_add_seq (seq, llist[i]);
7019 t = build_int_cst (unsigned_type_node, 1);
7020 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
7021 gimple_seq_add_stmt (seq, g);
7022 gimple_seq_add_stmt (seq, gimple_build_label (header));
7023 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
7024 gimple_seq_add_stmt (seq, g);
7025 gimple_seq_add_stmt (seq, gimple_build_label (end));
7028 if (sctx.is_simt)
7030 gimple_seq_add_seq (dlist, sctx.simt_dlist);
7031 gimple *g
7032 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
7033 gimple_seq_add_stmt (dlist, g);
7036 /* The copyin sequence is not to be executed by the main thread, since
7037 that would result in self-copies. Perhaps not visible to scalars,
7038 but it certainly is to C++ operator=. */
7039 if (copyin_seq)
7041 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
7043 x = build2 (NE_EXPR, boolean_type_node, x,
7044 build_int_cst (TREE_TYPE (x), 0));
7045 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
7046 gimplify_and_add (x, ilist);
7049 /* If any copyin variable is passed by reference, we must ensure the
7050 master thread doesn't modify it before it is copied over in all
7051 threads. Similarly for variables in both firstprivate and
7052 lastprivate clauses we need to ensure the lastprivate copying
7053 happens after firstprivate copying in all threads. And similarly
7054 for UDRs if initializer expression refers to omp_orig. */
7055 if (copyin_by_ref || lastprivate_firstprivate
7056 || (reduction_omp_orig_ref
7057 && !ctx->scan_inclusive
7058 && !ctx->scan_exclusive))
7060 /* Don't add any barrier for #pragma omp simd or
7061 #pragma omp distribute. */
7062 if (!is_task_ctx (ctx)
7063 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
7064 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
7065 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
7068 /* If max_vf is non-zero, then we can use only a vectorization factor
7069 up to the max_vf we chose. So stick it into the safelen clause. */
7070 if (maybe_ne (sctx.max_vf, 0U))
7072 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
7073 OMP_CLAUSE_SAFELEN);
7074 poly_uint64 safe_len;
7075 if (c == NULL_TREE
7076 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
7077 && maybe_gt (safe_len, sctx.max_vf)))
7079 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
7080 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
7081 sctx.max_vf);
7082 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
7083 gimple_omp_for_set_clauses (ctx->stmt, c);
/* Create temporary variables for lastprivate(conditional:) implementation
   in context CTX with CLAUSES.  */
7091 static void
7092 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
7094 tree iter_type = NULL_TREE;
7095 tree cond_ptr = NULL_TREE;
7096 tree iter_var = NULL_TREE;
7097 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7098 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
7099 tree next = *clauses;
7100 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
7101 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7102 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
7104 if (is_simd)
7106 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
7107 gcc_assert (cc);
7108 if (iter_type == NULL_TREE)
7110 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
7111 iter_var = create_tmp_var_raw (iter_type);
7112 DECL_CONTEXT (iter_var) = current_function_decl;
7113 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7114 DECL_CHAIN (iter_var) = ctx->block_vars;
7115 ctx->block_vars = iter_var;
7116 tree c3
7117 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7118 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7119 OMP_CLAUSE_DECL (c3) = iter_var;
7120 OMP_CLAUSE_CHAIN (c3) = *clauses;
7121 *clauses = c3;
7122 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7124 next = OMP_CLAUSE_CHAIN (cc);
7125 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7126 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
7127 ctx->lastprivate_conditional_map->put (o, v);
7128 continue;
7130 if (iter_type == NULL)
7132 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
7134 struct omp_for_data fd;
7135 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
7136 NULL);
7137 iter_type = unsigned_type_for (fd.iter_type);
7139 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
7140 iter_type = unsigned_type_node;
7141 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
7142 if (c2)
7144 cond_ptr
7145 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
7146 OMP_CLAUSE_DECL (c2) = cond_ptr;
7148 else
7150 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
7151 DECL_CONTEXT (cond_ptr) = current_function_decl;
7152 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
7153 DECL_CHAIN (cond_ptr) = ctx->block_vars;
7154 ctx->block_vars = cond_ptr;
7155 c2 = build_omp_clause (UNKNOWN_LOCATION,
7156 OMP_CLAUSE__CONDTEMP_);
7157 OMP_CLAUSE_DECL (c2) = cond_ptr;
7158 OMP_CLAUSE_CHAIN (c2) = *clauses;
7159 *clauses = c2;
7161 iter_var = create_tmp_var_raw (iter_type);
7162 DECL_CONTEXT (iter_var) = current_function_decl;
7163 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7164 DECL_CHAIN (iter_var) = ctx->block_vars;
7165 ctx->block_vars = iter_var;
7166 tree c3
7167 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7168 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7169 OMP_CLAUSE_DECL (c3) = iter_var;
7170 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
7171 OMP_CLAUSE_CHAIN (c2) = c3;
7172 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7174 tree v = create_tmp_var_raw (iter_type);
7175 DECL_CONTEXT (v) = current_function_decl;
7176 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
7177 DECL_CHAIN (v) = ctx->block_vars;
7178 ctx->block_vars = v;
7179 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7180 ctx->lastprivate_conditional_map->put (o, v);
/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  BODY_P is the sequence to insert early initialization
   if needed, STMT_LIST is where the non-conditional lastprivate handling
   goes into and CSTMT_LIST is a sequence that needs to be run in a critical
   section.  */
7192 static void
7193 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
7194 gimple_seq *stmt_list, gimple_seq *cstmt_list,
7195 omp_context *ctx)
7197 tree x, c, label = NULL, orig_clauses = clauses;
7198 bool par_clauses = false;
7199 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
7200 unsigned HOST_WIDE_INT conditional_off = 0;
7201 gimple_seq post_stmt_list = NULL;
7203 /* Early exit if there are no lastprivate or linear clauses. */
7204 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
7205 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
7206 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
7207 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
7208 break;
7209 if (clauses == NULL)
7211 /* If this was a workshare clause, see if it had been combined
7212 with its parallel. In that case, look for the clauses on the
7213 parallel statement itself. */
7214 if (is_parallel_ctx (ctx))
7215 return;
7217 ctx = ctx->outer;
7218 if (ctx == NULL || !is_parallel_ctx (ctx))
7219 return;
7221 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7222 OMP_CLAUSE_LASTPRIVATE);
7223 if (clauses == NULL)
7224 return;
7225 par_clauses = true;
7228 bool maybe_simt = false;
7229 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7230 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7232 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
7233 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
7234 if (simduid)
7235 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
7238 if (predicate)
7240 gcond *stmt;
7241 tree label_true, arm1, arm2;
7242 enum tree_code pred_code = TREE_CODE (predicate);
7244 label = create_artificial_label (UNKNOWN_LOCATION);
7245 label_true = create_artificial_label (UNKNOWN_LOCATION);
7246 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
7248 arm1 = TREE_OPERAND (predicate, 0);
7249 arm2 = TREE_OPERAND (predicate, 1);
7250 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7251 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
7253 else
7255 arm1 = predicate;
7256 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7257 arm2 = boolean_false_node;
7258 pred_code = NE_EXPR;
7260 if (maybe_simt)
7262 c = build2 (pred_code, boolean_type_node, arm1, arm2);
7263 c = fold_convert (integer_type_node, c);
7264 simtcond = create_tmp_var (integer_type_node);
7265 gimplify_assign (simtcond, c, stmt_list);
7266 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
7267 1, simtcond);
7268 c = create_tmp_var (integer_type_node);
7269 gimple_call_set_lhs (g, c);
7270 gimple_seq_add_stmt (stmt_list, g);
7271 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
7272 label_true, label);
7274 else
7275 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
7276 gimple_seq_add_stmt (stmt_list, stmt);
7277 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
7280 tree cond_ptr = NULL_TREE;
7281 for (c = clauses; c ;)
7283 tree var, new_var;
7284 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7285 gimple_seq *this_stmt_list = stmt_list;
7286 tree lab2 = NULL_TREE;
7288 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7289 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7290 && ctx->lastprivate_conditional_map
7291 && !ctx->combined_into_simd_safelen1)
7293 gcc_assert (body_p);
7294 if (simduid)
7295 goto next;
7296 if (cond_ptr == NULL_TREE)
7298 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
7299 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
7301 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
7302 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7303 tree v = *ctx->lastprivate_conditional_map->get (o);
7304 gimplify_assign (v, build_zero_cst (type), body_p);
7305 this_stmt_list = cstmt_list;
7306 tree mem;
7307 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
7309 mem = build2 (MEM_REF, type, cond_ptr,
7310 build_int_cst (TREE_TYPE (cond_ptr),
7311 conditional_off));
7312 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
7314 else
7315 mem = build4 (ARRAY_REF, type, cond_ptr,
7316 size_int (conditional_off++), NULL_TREE, NULL_TREE);
7317 tree mem2 = copy_node (mem);
7318 gimple_seq seq = NULL;
7319 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
7320 gimple_seq_add_seq (this_stmt_list, seq);
7321 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7322 lab2 = create_artificial_label (UNKNOWN_LOCATION);
7323 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
7324 gimple_seq_add_stmt (this_stmt_list, g);
7325 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
7326 gimplify_assign (mem2, v, this_stmt_list);
7328 else if (predicate
7329 && ctx->combined_into_simd_safelen1
7330 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7331 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7332 && ctx->lastprivate_conditional_map)
7333 this_stmt_list = &post_stmt_list;
7335 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7336 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7337 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7339 var = OMP_CLAUSE_DECL (c);
7340 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7341 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7342 && is_taskloop_ctx (ctx))
7344 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
7345 new_var = lookup_decl (var, ctx->outer);
7347 else
7349 new_var = lookup_decl (var, ctx);
7350 /* Avoid uninitialized warnings for lastprivate and
7351 for linear iterators. */
7352 if (predicate
7353 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7354 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
7355 suppress_warning (new_var, OPT_Wuninitialized);
7358 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
7360 tree val = DECL_VALUE_EXPR (new_var);
7361 if (TREE_CODE (val) == ARRAY_REF
7362 && VAR_P (TREE_OPERAND (val, 0))
7363 && lookup_attribute ("omp simd array",
7364 DECL_ATTRIBUTES (TREE_OPERAND (val,
7365 0))))
7367 if (lastlane == NULL)
7369 lastlane = create_tmp_var (unsigned_type_node);
7370 gcall *g
7371 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
7372 2, simduid,
7373 TREE_OPERAND (val, 1));
7374 gimple_call_set_lhs (g, lastlane);
7375 gimple_seq_add_stmt (this_stmt_list, g);
7377 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
7378 TREE_OPERAND (val, 0), lastlane,
7379 NULL_TREE, NULL_TREE);
7380 TREE_THIS_NOTRAP (new_var) = 1;
7383 else if (maybe_simt)
7385 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7386 ? DECL_VALUE_EXPR (new_var)
7387 : new_var);
7388 if (simtlast == NULL)
7390 simtlast = create_tmp_var (unsigned_type_node);
7391 gcall *g = gimple_build_call_internal
7392 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7393 gimple_call_set_lhs (g, simtlast);
7394 gimple_seq_add_stmt (this_stmt_list, g);
7396 x = build_call_expr_internal_loc
7397 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7398 TREE_TYPE (val), 2, val, simtlast);
7399 new_var = unshare_expr (new_var);
7400 gimplify_assign (new_var, x, this_stmt_list);
7401 new_var = unshare_expr (new_var);
7404 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7405 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7407 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7408 gimple_seq_add_seq (this_stmt_list,
7409 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7410 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7412 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7413 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7415 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7416 gimple_seq_add_seq (this_stmt_list,
7417 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7418 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
7421 x = NULL_TREE;
7422 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7423 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7424 && is_taskloop_ctx (ctx))
7426 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7427 ctx->outer->outer);
7428 if (is_global_var (ovar))
7429 x = ovar;
7431 if (!x)
7432 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7433 if (omp_privatize_by_reference (var))
7434 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7435 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7436 gimplify_and_add (x, this_stmt_list);
7438 if (lab2)
7439 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7442 next:
7443 c = OMP_CLAUSE_CHAIN (c);
7444 if (c == NULL && !par_clauses)
7446 /* If this was a workshare clause, see if it had been combined
7447 with its parallel. In that case, continue looking for the
7448 clauses also on the parallel statement itself. */
7449 if (is_parallel_ctx (ctx))
7450 break;
7452 ctx = ctx->outer;
7453 if (ctx == NULL || !is_parallel_ctx (ctx))
7454 break;
7456 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7457 OMP_CLAUSE_LASTPRIVATE);
7458 par_clauses = true;
7462 if (label)
7463 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7464 gimple_seq_add_seq (stmt_list, post_stmt_list);
7467 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7468 (which might be a placeholder). INNER is true if this is an inner
7469 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7470 join markers. Generate the before-loop forking sequence in
7471 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7472 general form of these sequences is
7474 GOACC_REDUCTION_SETUP
7475 GOACC_FORK
7476 GOACC_REDUCTION_INIT
7478 GOACC_REDUCTION_FINI
7479 GOACC_JOIN
7480 GOACC_REDUCTION_TEARDOWN. */
7482 static void
7483 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7484 gcall *fork, gcall *private_marker, gcall *join,
7485 gimple_seq *fork_seq, gimple_seq *join_seq,
7486 omp_context *ctx)
/* Four sub-sequences collect the per-clause IFN_GOACC_REDUCTION calls;
   they are stitched around the fork/join markers at the end.  */
7488 gimple_seq before_fork = NULL;
7489 gimple_seq after_fork = NULL;
7490 gimple_seq before_join = NULL;
7491 gimple_seq after_join = NULL;
7492 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7493 setup_code = NULL_TREE, teardown_code = NULL_TREE;
/* Running byte offset into the reduction buffer shared with the target.  */
7494 unsigned offset = 0;
7496 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7497 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7499 /* No 'reduction' clauses on OpenACC 'kernels'. */
7500 gcc_checking_assert (!is_oacc_kernels (ctx));
7501 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7502 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7504 tree orig = OMP_CLAUSE_DECL (c);
7505 tree var = maybe_lookup_decl (orig, ctx);
7506 tree ref_to_res = NULL_TREE;
7507 tree incoming, outgoing, v1, v2, v3;
7508 bool is_private = false;
/* Canonicalize the operator: '-' combines partial results like '+',
   and the short-circuit logical ops are encoded as their bitwise
   counterparts for the internal-function operand.  */
7510 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7511 if (rcode == MINUS_EXPR)
7512 rcode = PLUS_EXPR;
7513 else if (rcode == TRUTH_ANDIF_EXPR)
7514 rcode = BIT_AND_EXPR;
7515 else if (rcode == TRUTH_ORIF_EXPR)
7516 rcode = BIT_IOR_EXPR;
7517 tree op = build_int_cst (unsigned_type_node, rcode);
7519 if (!var)
7520 var = orig;
7522 incoming = outgoing = var;
7524 if (!inner)
7526 /* See if an outer construct also reduces this variable. */
7527 omp_context *outer = ctx;
7529 while (omp_context *probe = outer->outer)
7531 enum gimple_code type = gimple_code (probe->stmt);
7532 tree cls;
7534 switch (type)
7536 case GIMPLE_OMP_FOR:
7537 cls = gimple_omp_for_clauses (probe->stmt);
7538 break;
7540 case GIMPLE_OMP_TARGET:
7541 /* No 'reduction' clauses inside OpenACC 'kernels'
7542 regions. */
7543 gcc_checking_assert (!is_oacc_kernels (probe));
7545 if (!is_gimple_omp_offloaded (probe->stmt))
7546 goto do_lookup;
7548 cls = gimple_omp_target_clauses (probe->stmt);
7549 break;
7551 default:
7552 goto do_lookup;
7555 outer = probe;
/* If an outer construct reduces the same variable, reuse its
   private copy for both the incoming and outgoing values.  */
7556 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7557 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7558 && orig == OMP_CLAUSE_DECL (cls))
7560 incoming = outgoing = lookup_decl (orig, probe);
7561 goto has_outer_reduction;
7563 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7564 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7565 && orig == OMP_CLAUSE_DECL (cls))
7567 is_private = true;
7568 goto do_lookup;
7572 do_lookup:
7573 /* This is the outermost construct with this reduction,
7574 see if there's a mapping for it. */
7575 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7576 && maybe_lookup_field (orig, outer) && !is_private)
7578 ref_to_res = build_receiver_ref (orig, false, outer);
7579 if (omp_privatize_by_reference (orig))
7580 ref_to_res = build_simple_mem_ref (ref_to_res);
7582 tree type = TREE_TYPE (var);
7583 if (POINTER_TYPE_P (type))
7584 type = TREE_TYPE (type);
/* The incoming value starts from the operator's identity element.  */
7586 outgoing = var;
7587 incoming = omp_reduction_init_op (loc, rcode, type);
7589 else
7591 /* Try to look at enclosing contexts for reduction var,
7592 use original if no mapping found. */
7593 tree t = NULL_TREE;
7594 omp_context *c = ctx->outer;
7595 while (c && !t)
7597 t = maybe_lookup_decl (orig, c);
7598 c = c->outer;
7600 incoming = outgoing = (t ? t : orig);
7603 has_outer_reduction:;
/* Zero signals "no receiver mapping" to the GOACC_REDUCTION calls.  */
7606 if (!ref_to_res)
7607 ref_to_res = integer_zero_node;
/* For by-reference reductions, materialize three pointer temporaries
   (one per setup/init/fini stage) and operate through dereferences.  */
7609 if (omp_privatize_by_reference (orig))
7611 tree type = TREE_TYPE (var);
7612 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7614 if (!inner)
7616 tree x = create_tmp_var (TREE_TYPE (type), id);
7617 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7620 v1 = create_tmp_var (type, id);
7621 v2 = create_tmp_var (type, id);
7622 v3 = create_tmp_var (type, id);
7624 gimplify_assign (v1, var, fork_seq);
7625 gimplify_assign (v2, var, fork_seq);
7626 gimplify_assign (v3, var, fork_seq);
7628 var = build_simple_mem_ref (var);
7629 v1 = build_simple_mem_ref (v1);
7630 v2 = build_simple_mem_ref (v2);
7631 v3 = build_simple_mem_ref (v3);
7632 outgoing = build_simple_mem_ref (outgoing);
7634 if (!TREE_CONSTANT (incoming))
7635 incoming = build_simple_mem_ref (incoming);
7637 else
7638 /* Note that 'var' might be a mem ref. */
7639 v1 = v2 = v3 = var;
7641 /* Determine position in reduction buffer, which may be used
7642 by target. The parser has ensured that this is not a
7643 variable-sized type. */
7644 fixed_size_mode mode
7645 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7646 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7647 offset = (offset + align - 1) & ~(align - 1);
7648 tree off = build_int_cst (sizetype, offset);
7649 offset += GET_MODE_SIZE (mode);
/* The internal-function opcodes are loop-invariant; build them once.  */
7651 if (!init_code)
7653 init_code = build_int_cst (integer_type_node,
7654 IFN_GOACC_REDUCTION_INIT);
7655 fini_code = build_int_cst (integer_type_node,
7656 IFN_GOACC_REDUCTION_FINI);
7657 setup_code = build_int_cst (integer_type_node,
7658 IFN_GOACC_REDUCTION_SETUP);
7659 teardown_code = build_int_cst (integer_type_node,
7660 IFN_GOACC_REDUCTION_TEARDOWN);
/* Build the four IFN_GOACC_REDUCTION variants, all sharing the same
   buffer offset OFF and operator OP, then queue each one into the
   appropriate sub-sequence around the fork/join points.  */
7663 tree setup_call
7664 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7665 TREE_TYPE (var), 6, setup_code,
7666 unshare_expr (ref_to_res),
7667 unshare_expr (incoming),
7668 level, op, off);
7669 tree init_call
7670 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7671 TREE_TYPE (var), 6, init_code,
7672 unshare_expr (ref_to_res),
7673 unshare_expr (v1), level, op, off);
7674 tree fini_call
7675 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7676 TREE_TYPE (var), 6, fini_code,
7677 unshare_expr (ref_to_res),
7678 unshare_expr (v2), level, op, off);
7679 tree teardown_call
7680 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7681 TREE_TYPE (var), 6, teardown_code,
7682 ref_to_res, unshare_expr (v3),
7683 level, op, off);
7685 gimplify_assign (unshare_expr (v1), setup_call, &before_fork);
7686 gimplify_assign (unshare_expr (v2), init_call, &after_fork);
7687 gimplify_assign (unshare_expr (v3), fini_call, &before_join);
7688 gimplify_assign (unshare_expr (outgoing), teardown_call, &after_join);
7691 /* Now stitch things together. */
7692 gimple_seq_add_seq (fork_seq, before_fork);
7693 if (private_marker)
7694 gimple_seq_add_stmt (fork_seq, private_marker);
7695 if (fork)
7696 gimple_seq_add_stmt (fork_seq, fork);
7697 gimple_seq_add_seq (fork_seq, after_fork);
7699 gimple_seq_add_seq (join_seq, before_join);
7700 if (join)
7701 gimple_seq_add_stmt (join_seq, join);
7702 gimple_seq_add_seq (join_seq, after_join);
7705 /* Generate code to implement the REDUCTION clauses, append it
7706 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7707 that should be emitted also inside of the critical section,
7708 in that case clear *CLIST afterwards, otherwise leave it as is
7709 and let the caller emit it itself. */
7711 static void
7712 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7713 gimple_seq *clist, omp_context *ctx)
7715 gimple_seq sub_seq = NULL;
7716 gimple *stmt;
7717 tree x, c;
7718 int count = 0;
7720 /* OpenACC loop reductions are handled elsewhere. */
7721 if (is_gimple_omp_oacc (ctx->stmt))
7722 return;
7724 /* SIMD reductions are handled in lower_rec_input_clauses. */
7725 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7726 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7727 return;
7729 /* inscan reductions are handled elsewhere. */
7730 if (ctx->scan_inclusive || ctx->scan_exclusive)
7731 return;
7733 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7734 update in that case, otherwise use a lock. */
7735 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7736 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7737 && !OMP_CLAUSE_REDUCTION_TASK (c))
7739 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7740 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7742 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7743 count = -1;
7744 break;
7746 count++;
7749 if (count == 0)
7750 return;
7752 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7754 tree var, ref, new_var, orig_var;
7755 enum tree_code code;
7756 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7758 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7759 || OMP_CLAUSE_REDUCTION_TASK (c))
7760 continue;
7762 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7763 orig_var = var = OMP_CLAUSE_DECL (c);
/* Peel the MEM_REF wrapping of an array-section reduction down to
   the underlying decl so it can be looked up in the contexts.  */
7764 if (TREE_CODE (var) == MEM_REF)
7766 var = TREE_OPERAND (var, 0);
7767 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7768 var = TREE_OPERAND (var, 0);
7769 if (TREE_CODE (var) == ADDR_EXPR)
7770 var = TREE_OPERAND (var, 0);
7771 else
7773 /* If this is a pointer or reference-based array
7774 section, the var could be private in the outer
7775 context e.g. on orphaned loop construct. Pretend this
7776 is private variable's outer reference. */
7777 ccode = OMP_CLAUSE_PRIVATE;
7778 if (INDIRECT_REF_P (var))
7779 var = TREE_OPERAND (var, 0);
7781 orig_var = var;
7782 if (is_variable_sized (var))
7784 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7785 var = DECL_VALUE_EXPR (var);
7786 gcc_assert (INDIRECT_REF_P (var));
7787 var = TREE_OPERAND (var, 0);
7788 gcc_assert (DECL_P (var));
7791 new_var = lookup_decl (var, ctx);
7792 if (var == OMP_CLAUSE_DECL (c)
7793 && omp_privatize_by_reference (var))
7794 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7795 ref = build_outer_var_ref (var, ctx, ccode);
7796 code = OMP_CLAUSE_REDUCTION_CODE (c);
7798 /* reduction(-:var) sums up the partial results, so it acts
7799 identically to reduction(+:var). */
7800 if (code == MINUS_EXPR)
7801 code = PLUS_EXPR;
/* &&/|| must be computed on 0/1-normalized operands; see below.  */
7803 bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
/* Exactly one scalar reduction: emit a relaxed OMP_ATOMIC update
   instead of taking the global GOMP_atomic lock.  */
7804 if (count == 1)
7806 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7808 addr = save_expr (addr);
7809 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7810 tree new_var2 = new_var;
7811 tree ref2 = ref;
7812 if (is_truth_op)
7814 tree zero = build_zero_cst (TREE_TYPE (new_var));
7815 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7816 boolean_type_node, new_var, zero);
7817 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7818 ref, zero);
7820 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7821 new_var2);
7822 if (is_truth_op)
7823 x = fold_convert (TREE_TYPE (new_var), x);
7824 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7825 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7826 gimplify_and_add (x, stmt_seqp);
7827 return;
/* Array section (MEM_REF) reduction: merge element by element in a
   generated loop running I from 0 through the section bound V.  */
7829 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7831 tree d = OMP_CLAUSE_DECL (c);
7832 tree type = TREE_TYPE (d);
7833 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7834 tree i = create_tmp_var (TREE_TYPE (v));
7835 tree ptype = build_pointer_type (TREE_TYPE (type));
7836 tree bias = TREE_OPERAND (d, 1);
7837 d = TREE_OPERAND (d, 0);
7838 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7840 tree b = TREE_OPERAND (d, 1);
7841 b = maybe_lookup_decl (b, ctx);
7842 if (b == NULL)
7844 b = TREE_OPERAND (d, 1);
7845 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7847 if (integer_zerop (bias))
7848 bias = b;
7849 else
7851 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7852 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7853 TREE_TYPE (b), b, bias);
7855 d = TREE_OPERAND (d, 0);
7857 /* For ref build_outer_var_ref already performs this, so
7858 only new_var needs a dereference. */
7859 if (INDIRECT_REF_P (d))
7861 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7862 gcc_assert (omp_privatize_by_reference (var)
7863 && var == orig_var);
7865 else if (TREE_CODE (d) == ADDR_EXPR)
7867 if (orig_var == var)
7869 new_var = build_fold_addr_expr (new_var);
7870 ref = build_fold_addr_expr (ref);
7873 else
7875 gcc_assert (orig_var == var);
7876 if (omp_privatize_by_reference (var))
7877 ref = build_fold_addr_expr (ref);
7879 if (DECL_P (v))
7881 tree t = maybe_lookup_decl (v, ctx);
7882 if (t)
7883 v = t;
7884 else
7885 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7886 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7888 if (!integer_zerop (bias))
7890 bias = fold_convert_loc (clause_loc, sizetype, bias);
7891 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7892 TREE_TYPE (new_var), new_var,
7893 unshare_expr (bias));
7894 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7895 TREE_TYPE (ref), ref, bias);
/* NEW_VAR/REF become roving element pointers (private vs. outer copy);
   they are bumped by the element size at the bottom of the loop.  */
7897 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7898 ref = fold_convert_loc (clause_loc, ptype, ref);
7899 tree m = create_tmp_var (ptype);
7900 gimplify_assign (m, new_var, stmt_seqp);
7901 new_var = m;
7902 m = create_tmp_var (ptype);
7903 gimplify_assign (m, ref, stmt_seqp);
7904 ref = m;
7905 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7906 tree body = create_artificial_label (UNKNOWN_LOCATION);
7907 tree end = create_artificial_label (UNKNOWN_LOCATION);
7908 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7909 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7910 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7911 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
/* UDR over array: splice in the merge sequence with the placeholders
   bound (via DECL_VALUE_EXPR) to the current outer/private elements.  */
7913 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7914 tree decl_placeholder
7915 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7916 SET_DECL_VALUE_EXPR (placeholder, out);
7917 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7918 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7919 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7920 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7921 gimple_seq_add_seq (&sub_seq,
7922 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7923 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7924 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7925 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7927 else
7929 tree out2 = out;
7930 tree priv2 = priv;
7931 if (is_truth_op)
7933 tree zero = build_zero_cst (TREE_TYPE (out));
7934 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7935 boolean_type_node, out, zero);
7936 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7937 boolean_type_node, priv, zero);
7939 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7940 if (is_truth_op)
7941 x = fold_convert (TREE_TYPE (out), x);
7942 out = unshare_expr (out);
7943 gimplify_assign (out, x, &sub_seq);
/* Advance both element pointers and the index, loop while I <= V.  */
7945 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7946 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7947 gimple_seq_add_stmt (&sub_seq, g);
7948 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7949 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7950 gimple_seq_add_stmt (&sub_seq, g);
7951 g = gimple_build_assign (i, PLUS_EXPR, i,
7952 build_int_cst (TREE_TYPE (i), 1));
7953 gimple_seq_add_stmt (&sub_seq, g);
7954 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7955 gimple_seq_add_stmt (&sub_seq, g);
7956 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
/* Scalar UDR: run the clause's merge sequence with the placeholder
   bound to the outer variable reference.  */
7958 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7960 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7962 if (omp_privatize_by_reference (var)
7963 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7964 TREE_TYPE (ref)))
7965 ref = build_fold_addr_expr_loc (clause_loc, ref);
7966 SET_DECL_VALUE_EXPR (placeholder, ref);
7967 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7968 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7969 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7970 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7971 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7973 else
/* Plain scalar reduction: outer = outer OP private.  */
7975 tree new_var2 = new_var;
7976 tree ref2 = ref;
7977 if (is_truth_op)
7979 tree zero = build_zero_cst (TREE_TYPE (new_var));
7980 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7981 boolean_type_node, new_var, zero);
7982 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7983 ref, zero);
7985 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7986 if (is_truth_op)
7987 x = fold_convert (TREE_TYPE (new_var), x);
7988 ref = build_outer_var_ref (var, ctx);
7989 gimplify_assign (ref, x, &sub_seq);
/* Multiple (or non-atomic-capable) reductions: run all the collected
   merges in SUB_SEQ inside the GOMP_atomic_start/GOMP_atomic_end
   critical section; CLIST, if given, is emitted in the same section.  */
7993 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7995 gimple_seq_add_stmt (stmt_seqp, stmt);
7997 gimple_seq_add_seq (stmt_seqp, sub_seq);
7999 if (clist)
8001 gimple_seq_add_seq (stmt_seqp, *clist);
8002 *clist = NULL;
8005 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
8007 gimple_seq_add_stmt (stmt_seqp, stmt);
8011 /* Generate code to implement the COPYPRIVATE clauses. */
8013 static void
8014 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
8015 omp_context *ctx)
8017 tree c;
8019 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8021 tree var, new_var, ref, x;
8022 bool by_ref;
8023 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8025 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
8026 continue;
8028 var = OMP_CLAUSE_DECL (c);
8029 by_ref = use_pointer_for_field (var, NULL);
/* Sender side (SLIST): store the variable's value -- or its address,
   when the field is passed by pointer -- into the sender record.  */
8031 ref = build_sender_ref (var, ctx);
8032 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
8033 if (by_ref)
8035 x = build_fold_addr_expr_loc (clause_loc, new_var);
8036 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
8038 gimplify_assign (ref, x, slist);
/* Receiver side (RLIST): read the broadcast value back out of the
   receiver record, dereferencing when it was sent by pointer, and
   assign it to the local copy via the language's assignment hook.  */
8040 ref = build_receiver_ref (var, false, ctx);
8041 if (by_ref)
8043 ref = fold_convert_loc (clause_loc,
8044 build_pointer_type (TREE_TYPE (new_var)),
8045 ref);
8046 ref = build_fold_indirect_ref_loc (clause_loc, ref);
8048 if (omp_privatize_by_reference (var))
8050 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
8051 ref = build_simple_mem_ref_loc (clause_loc, ref);
8052 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
8054 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
8055 gimplify_and_add (x, rlist);
8060 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
8061 and REDUCTION from the sender (aka parent) side. */
8063 static void
8064 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
8065 omp_context *ctx)
8067 tree c, t;
8068 int ignored_looptemp = 0;
8069 bool is_taskloop = false;
8071 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
8072 by GOMP_taskloop. */
8073 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
8075 ignored_looptemp = 2;
8076 is_taskloop = true;
8079 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8081 tree val, ref, x, var;
8082 bool by_ref, do_in = false, do_out = false;
8083 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
/* First switch: filter to the clause kinds that need any data
   motion at all; everything else is skipped with 'continue'.  */
8085 switch (OMP_CLAUSE_CODE (c))
8087 case OMP_CLAUSE_PRIVATE:
8088 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
8089 break;
8090 continue;
8091 case OMP_CLAUSE_FIRSTPRIVATE:
8092 case OMP_CLAUSE_COPYIN:
8093 case OMP_CLAUSE_LASTPRIVATE:
8094 case OMP_CLAUSE_IN_REDUCTION:
8095 case OMP_CLAUSE__REDUCTEMP_:
8096 break;
8097 case OMP_CLAUSE_REDUCTION:
8098 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
8099 continue;
8100 break;
8101 case OMP_CLAUSE_SHARED:
8102 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8103 break;
8104 continue;
8105 case OMP_CLAUSE__LOOPTEMP_:
8106 if (ignored_looptemp)
8108 ignored_looptemp--;
8109 continue;
8111 break;
8112 default:
8113 continue;
8116 val = OMP_CLAUSE_DECL (c);
/* For (in_)reduction over an array section, strip the MEM_REF
   wrapping down to the base decl before the context lookups.  */
8117 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8118 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
8119 && TREE_CODE (val) == MEM_REF)
8121 val = TREE_OPERAND (val, 0);
8122 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
8123 val = TREE_OPERAND (val, 0);
8124 if (INDIRECT_REF_P (val)
8125 || TREE_CODE (val) == ADDR_EXPR)
8126 val = TREE_OPERAND (val, 0);
8127 if (is_variable_sized (val))
8128 continue;
8131 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
8132 outer taskloop region. */
8133 omp_context *ctx_for_o = ctx;
8134 if (is_taskloop
8135 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8136 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8137 ctx_for_o = ctx->outer;
8139 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
8141 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
8142 && is_global_var (var)
8143 && (val == OMP_CLAUSE_DECL (c)
8144 || !is_task_ctx (ctx)
8145 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
8146 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
8147 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
8148 != POINTER_TYPE)))))
8149 continue;
8151 t = omp_member_access_dummy_var (var);
8152 if (t)
8154 var = DECL_VALUE_EXPR (var);
8155 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
8156 if (o != t)
8157 var = unshare_and_remap (var, t, o);
8158 else
8159 var = unshare_expr (var);
8162 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
8164 /* Handle taskloop firstprivate/lastprivate, where the
8165 lastprivate on GIMPLE_OMP_TASK is represented as
8166 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8167 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
8168 x = omp_build_component_ref (ctx->sender_decl, f);
8169 if (use_pointer_for_field (val, ctx))
8170 var = build_fold_addr_expr (var);
8171 gimplify_assign (x, var, ilist);
8172 DECL_ABSTRACT_ORIGIN (f) = NULL;
8173 continue;
8176 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8177 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
8178 || val == OMP_CLAUSE_DECL (c))
8179 && is_variable_sized (val))
8180 continue;
8181 by_ref = use_pointer_for_field (val, NULL);
/* Second switch: decide the direction(s) of the copy -- DO_IN for
   parent-to-region stores, DO_OUT for the region-to-parent copy-back.  */
8183 switch (OMP_CLAUSE_CODE (c))
8185 case OMP_CLAUSE_FIRSTPRIVATE:
8186 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
8187 && !by_ref
8188 && is_task_ctx (ctx))
8189 suppress_warning (var);
8190 do_in = true;
8191 break;
8193 case OMP_CLAUSE_PRIVATE:
8194 case OMP_CLAUSE_COPYIN:
8195 case OMP_CLAUSE__LOOPTEMP_:
8196 case OMP_CLAUSE__REDUCTEMP_:
8197 do_in = true;
8198 break;
8200 case OMP_CLAUSE_LASTPRIVATE:
8201 if (by_ref || omp_privatize_by_reference (val))
8203 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8204 continue;
8205 do_in = true;
8207 else
8209 do_out = true;
8210 if (lang_hooks.decls.omp_private_outer_ref (val))
8211 do_in = true;
8213 break;
8215 case OMP_CLAUSE_REDUCTION:
8216 case OMP_CLAUSE_IN_REDUCTION:
8217 do_in = true;
8218 if (val == OMP_CLAUSE_DECL (c))
8220 if (is_task_ctx (ctx))
8221 by_ref = use_pointer_for_field (val, ctx);
8222 else
8223 do_out = !(by_ref || omp_privatize_by_reference (val));
8225 else
8226 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
8227 break;
8229 default:
8230 gcc_unreachable ();
/* Emit the copies: ILIST gets the into-region stores (value or
   address, depending on BY_REF), OLIST the copy-back loads.  */
8233 if (do_in)
8235 ref = build_sender_ref (val, ctx);
8236 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
8237 gimplify_assign (ref, x, ilist);
8238 if (is_task_ctx (ctx))
8239 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
8242 if (do_out)
8244 ref = build_sender_ref (val, ctx);
8245 gimplify_assign (var, ref, olist);
8250 /* Generate code to implement SHARED from the sender (aka parent)
8251 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8252 list things that got automatically shared. */
8254 static void
8255 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
8257 tree var, ovar, nvar, t, f, x, record_type;
8259 if (ctx->record_type == NULL)
8260 return;
/* Walk the fields of the sender record; each field's abstract origin
   points back at the original shared variable it stands for.  */
8262 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
8263 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8265 ovar = DECL_ABSTRACT_ORIGIN (f);
8266 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
8267 continue;
8269 nvar = maybe_lookup_decl (ovar, ctx);
/* Skip variables not remapped in this context, and variables handled
   through the allocate map.  */
8270 if (!nvar
8271 || !DECL_HAS_VALUE_EXPR_P (nvar)
8272 || (ctx->allocate_map
8273 && ctx->allocate_map->get (ovar)))
8274 continue;
8276 /* If CTX is a nested parallel directive. Find the immediately
8277 enclosing parallel or workshare construct that contains a
8278 mapping for OVAR. */
8279 var = lookup_decl_in_outer_ctx (ovar, ctx);
8281 t = omp_member_access_dummy_var (var);
8282 if (t)
8284 var = DECL_VALUE_EXPR (var);
8285 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
8286 if (o != t)
8287 var = unshare_and_remap (var, t, o);
8288 else
8289 var = unshare_expr (var);
/* Pointer fields send the variable's address into the region.  */
8292 if (use_pointer_for_field (ovar, ctx))
8294 x = build_sender_ref (ovar, ctx);
8295 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
8296 && TREE_TYPE (f) == TREE_TYPE (ovar))
8298 gcc_assert (is_parallel_ctx (ctx)
8299 && DECL_ARTIFICIAL (ovar));
8300 /* _condtemp_ clause. */
8301 var = build_constructor (TREE_TYPE (x), NULL);
8303 else
8304 var = build_fold_addr_expr (var);
8305 gimplify_assign (x, var, ilist);
/* Value fields are copied in, and -- unless read-only or a
   by-reference result/parm decl -- copied back out afterwards.  */
8307 else
8309 x = build_sender_ref (ovar, ctx);
8310 gimplify_assign (x, var, ilist);
8312 if (!TREE_READONLY (var)
8313 /* We don't need to receive a new reference to a result
8314 or parm decl. In fact we may not store to it as we will
8315 invalidate any pending RSO and generate wrong gimple
8316 during inlining. */
8317 && !((TREE_CODE (var) == RESULT_DECL
8318 || TREE_CODE (var) == PARM_DECL)
8319 && DECL_BY_REFERENCE (var)))
8321 x = build_sender_ref (ovar, ctx);
8322 gimplify_assign (var, x, olist);
8328 /* Emit an OpenACC head marker call, encapsulating the partitioning and
8329 other information that must be processed by the target compiler.
8330 Return the maximum number of dimensions the associated loop might
8331 be partitioned over. */
8333 static unsigned
8334 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
8335 gimple_seq *seq, omp_context *ctx)
8337 unsigned levels = 0;
8338 unsigned tag = 0;
8339 tree gang_static = NULL_TREE;
8340 auto_vec<tree, 5> args;
8342 args.quick_push (build_int_cst
8343 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
8344 args.quick_push (ddvar);
8345 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8347 switch (OMP_CLAUSE_CODE (c))
8349 case OMP_CLAUSE_GANG:
8350 tag |= OLF_DIM_GANG;
8351 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
8352 /* static:* is represented by -1, and we can ignore it, as
8353 scheduling is always static. */
8354 if (gang_static && integer_minus_onep (gang_static))
8355 gang_static = NULL_TREE;
8356 levels++;
8357 break;
8359 case OMP_CLAUSE_WORKER:
8360 tag |= OLF_DIM_WORKER;
8361 levels++;
8362 break;
8364 case OMP_CLAUSE_VECTOR:
8365 tag |= OLF_DIM_VECTOR;
8366 levels++;
8367 break;
8369 case OMP_CLAUSE_SEQ:
8370 tag |= OLF_SEQ;
8371 break;
8373 case OMP_CLAUSE_AUTO:
8374 tag |= OLF_AUTO;
8375 break;
8377 case OMP_CLAUSE_INDEPENDENT:
8378 tag |= OLF_INDEPENDENT;
8379 break;
8381 case OMP_CLAUSE_TILE:
8382 tag |= OLF_TILE;
8383 break;
8385 case OMP_CLAUSE_REDUCTION:
8386 tag |= OLF_REDUCTION;
8387 break;
8389 default:
8390 continue;
8394 if (gang_static)
8396 if (DECL_P (gang_static))
8397 gang_static = build_outer_var_ref (gang_static, ctx);
8398 tag |= OLF_GANG_STATIC;
8401 omp_context *tgt = enclosing_target_ctx (ctx);
8402 if (!tgt || is_oacc_parallel_or_serial (tgt))
8404 else if (is_oacc_kernels (tgt))
8405 /* Not using this loops handling inside OpenACC 'kernels' regions. */
8406 gcc_unreachable ();
8407 else if (is_oacc_kernels_decomposed_part (tgt))
8409 else
8410 gcc_unreachable ();
8412 /* In a parallel region, loops are implicitly INDEPENDENT. */
8413 if (!tgt || is_oacc_parallel_or_serial (tgt))
8414 tag |= OLF_INDEPENDENT;
8416 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8417 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8418 if (tgt && is_oacc_kernels_decomposed_part (tgt))
8420 gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
8421 gcc_assert (!(tag & OLF_AUTO));
8424 if (tag & OLF_TILE)
8425 /* Tiling could use all 3 levels. */
8426 levels = 3;
8427 else
8429 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8430 Ensure at least one level, or 2 for possible auto
8431 partitioning */
8432 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
8433 << OLF_DIM_BASE) | OLF_SEQ));
8435 if (levels < 1u + maybe_auto)
8436 levels = 1u + maybe_auto;
8439 args.quick_push (build_int_cst (integer_type_node, levels));
8440 args.quick_push (build_int_cst (integer_type_node, tag));
8441 if (gang_static)
8442 args.quick_push (gang_static);
8444 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
8445 gimple_set_location (call, loc);
8446 gimple_set_lhs (call, ddvar);
8447 gimple_seq_add_stmt (seq, call);
8449 return levels;
 8452 /* Emit an OpenACC loop head or tail marker to SEQ.  TOFOLLOW, when
 8453 non-NULL, is the partitioning level of the enclosed region. */
 8455 static void
 8456 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
 8457 tree tofollow, gimple_seq *seq)
 /* Select the marker kind for the IFN_UNIQUE call.  */
 8459 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
 8460 : IFN_UNIQUE_OACC_TAIL_MARK);
 8461 tree marker = build_int_cst (integer_type_node, marker_kind);
 /* TOFOLLOW is optional; pass it as a third argument only when set.  */
 8462 int nargs = 2 + (tofollow != NULL_TREE);
 8463 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
 8464 marker, ddvar, tofollow);
 8465 gimple_set_location (call, loc);
 8466 gimple_set_lhs (call, ddvar);
 8467 gimple_seq_add_stmt (seq, call);
 8470 /* Generate the before and after OpenACC loop sequences. CLAUSES are
 8471 the loop clauses, from which we extract reductions. Initialize
 8472 HEAD and TAIL. */
 8474 static void
 8475 lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
 8476 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
 8478 bool inner = false;
 /* The .data_dep variable threads all markers of this loop together so
 that later passes can relate the head and tail sequences.  */
 8479 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
 8480 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
 8482 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
 /* Rewire the private marker, if any, to use the data-dependency
 variable as both its result and its second argument.  */
 8484 if (private_marker)
 8486 gimple_set_location (private_marker, loc);
 8487 gimple_call_set_lhs (private_marker, ddvar);
 8488 gimple_call_set_arg (private_marker, 1, ddvar);
 8491 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
 8492 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
 8494 gcc_assert (count);
 /* Emit one fork/join pair per partitioning level; fork sequences are
 appended to HEAD in order while join sequences are prepended to
 TAIL, so the pairs nest properly.  */
 8495 for (unsigned done = 1; count; count--, done++)
 8497 gimple_seq fork_seq = NULL;
 8498 gimple_seq join_seq = NULL;
 8500 tree place = build_int_cst (integer_type_node, -1);
 8501 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
 8502 fork_kind, ddvar, place);
 8503 gimple_set_location (fork, loc);
 8504 gimple_set_lhs (fork, ddvar);
 8506 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
 8507 join_kind, ddvar, place);
 8508 gimple_set_location (join, loc);
 8509 gimple_set_lhs (join, ddvar);
 8511 /* Mark the beginning of this level sequence. */
 8512 if (inner)
 8513 lower_oacc_loop_marker (loc, ddvar, true,
 8514 build_int_cst (integer_type_node, count),
 8515 &fork_seq);
 8516 lower_oacc_loop_marker (loc, ddvar, false,
 8517 build_int_cst (integer_type_node, done),
 8518 &join_seq);
 /* The private marker is only attached at the outermost level
 (count == 1 on the last iteration processed first... the first
 iteration of this loop handles the outermost level last; NOTE:
 count counts down, so count == 1 is the innermost pair).  */
 8520 lower_oacc_reductions (loc, clauses, place, inner,
 8521 fork, (count == 1) ? private_marker : NULL,
 8522 join, &fork_seq, &join_seq, ctx);
 8524 /* Append this level to head. */
 8525 gimple_seq_add_seq (head, fork_seq);
 8526 /* Prepend it to tail. */
 8527 gimple_seq_add_seq (&join_seq, *tail);
 8528 *tail = join_seq;
 8530 inner = true;
 8533 /* Mark the end of the sequence. */
 8534 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
 8535 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
 8538 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
 8539 catch handler and return it. This prevents programs from violating the
 8540 structured block semantics with throws. */
 8542 static gimple_seq
 8543 maybe_catch_exception (gimple_seq body)
 8545 gimple *g;
 8546 tree decl;
 /* Without -fexceptions there is nothing to guard against.  */
 8548 if (!flag_exceptions)
 8549 return body;
 /* Prefer the frontend's cleanup action (e.g. std::terminate for C++);
 fall back to a plain trap when the language provides none.  */
 8551 if (lang_hooks.eh_protect_cleanup_actions != NULL)
 8552 decl = lang_hooks.eh_protect_cleanup_actions ();
 8553 else
 8554 decl = builtin_decl_explicit (BUILT_IN_TRAP);
 8556 g = gimple_build_eh_must_not_throw (decl);
 8557 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
 8558 GIMPLE_TRY_CATCH);
 8560 return gimple_seq_alloc_with_stmt (g);
8564 /* Routines to lower OMP directives into OMP-GIMPLE. */
 8566 /* If ctx is a worksharing context inside of a cancellable parallel
 8567 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
 8568 and conditional branch to parallel's cancel_label to handle
 8569 cancellation in the implicit barrier. */
 8571 static void
 8572 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
 8573 gimple_seq *body)
 8575 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
 /* A nowait construct has no implicit barrier, hence nothing to do.  */
 8576 if (gimple_omp_return_nowait_p (omp_return))
 8577 return;
 /* Walk outwards looking for an enclosing cancellable parallel; stop at
 any intervening construct other than taskgroup/scope.  */
 8578 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
 8579 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
 8580 && outer->cancellable)
 /* Give the OMP_RETURN a boolean result and branch to the
 parallel's cancel label when the barrier reports cancellation.  */
 8582 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
 8583 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
 8584 tree lhs = create_tmp_var (c_bool_type);
 8585 gimple_omp_return_set_lhs (omp_return, lhs);
 8586 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
 8587 gimple *g = gimple_build_cond (NE_EXPR, lhs,
 8588 fold_convert (c_bool_type,
 8589 boolean_false_node),
 8590 outer->cancel_label, fallthru_label);
 8591 gimple_seq_add_stmt (body, g);
 8592 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
 8594 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
 8595 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
 8596 return;
 8599 /* Find the first task_reduction or reduction clause or return NULL
 8600 if there are none. */
 8602 static inline tree
 8603 omp_task_reductions_find_first (tree clauses, enum tree_code code,
 8604 enum omp_clause_code ccode)
 8606 while (1)
 8608 clauses = omp_find_clause (clauses, ccode);
 8609 if (clauses == NULL_TREE)
 8610 return NULL_TREE;
 /* Plain reduction clauses only count when the construct is a
 taskloop or the clause carries the 'task' modifier.  */
 8611 if (ccode != OMP_CLAUSE_REDUCTION
 8612 || code == OMP_TASKLOOP
 8613 || OMP_CLAUSE_REDUCTION_TASK (clauses))
 8614 return clauses;
 8615 clauses = OMP_CLAUSE_CHAIN (clauses);
8619 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8620 gimple_seq *, gimple_seq *);
 8622 /* Lower the OpenMP sections directive in the current statement in GSI_P.
 8623 CTX is the enclosing OMP context for the current statement. */
 8625 static void
 8626 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
 8628 tree block, control;
 8629 gimple_stmt_iterator tgsi;
 8630 gomp_sections *stmt;
 8631 gimple *t;
 8632 gbind *new_stmt, *bind;
 8633 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
 8635 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
 8637 push_gimplify_context ();
 8639 dlist = NULL;
 8640 ilist = NULL;
 /* Task reductions on sections need a _REDUCTEMP_ clause holding the
 runtime bookkeeping pointer; set that up before other clauses.  */
 8642 tree rclauses
 8643 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
 8644 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
 8645 tree rtmp = NULL_TREE;
 8646 if (rclauses)
 8648 tree type = build_pointer_type (pointer_sized_int_node);
 8649 tree temp = create_tmp_var (type);
 8650 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
 8651 OMP_CLAUSE_DECL (c) = temp;
 8652 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
 8653 gimple_omp_sections_set_clauses (stmt, c);
 8654 lower_omp_task_reductions (ctx, OMP_SECTIONS,
 8655 gimple_omp_sections_clauses (stmt),
 8656 &ilist, &tred_dlist);
 8657 rclauses = c;
 8658 rtmp = make_ssa_name (type);
 8659 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
 8662 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
 8663 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
 8665 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
 8666 &ilist, &dlist, ctx, NULL);
 /* The control variable selects which section the current thread runs.  */
 8668 control = create_tmp_var (unsigned_type_node, ".section");
 8669 gimple_omp_sections_set_control (stmt, control);
 /* Lower each GIMPLE_OMP_SECTION body in place; the last section also
 receives the lastprivate handling.  */
 8671 new_body = gimple_omp_body (stmt);
 8672 gimple_omp_set_body (stmt, NULL);
 8673 tgsi = gsi_start (new_body);
 8674 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
 8676 omp_context *sctx;
 8677 gimple *sec_start;
 8679 sec_start = gsi_stmt (tgsi);
 8680 sctx = maybe_lookup_ctx (sec_start);
 8681 gcc_assert (sctx);
 8683 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
 8684 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
 8685 GSI_CONTINUE_LINKING);
 8686 gimple_omp_set_body (sec_start, NULL);
 8688 if (gsi_one_before_end_p (tgsi))
 8690 gimple_seq l = NULL;
 8691 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
 8692 &ilist, &l, &clist, ctx);
 8693 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
 8694 gimple_omp_section_set_last (sec_start);
 8697 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
 8698 GSI_CONTINUE_LINKING);
 8701 block = make_node (BLOCK);
 8702 bind = gimple_build_bind (NULL, new_body, block);
 8704 olist = NULL;
 8705 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
 8706 &clist, ctx);
 /* Reduction statements that must run atomically (CLIST) are wrapped in
 GOMP_atomic_start/GOMP_atomic_end calls.  */
 8707 if (clist)
 8709 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
 8710 gcall *g = gimple_build_call (fndecl, 0);
 8711 gimple_seq_add_stmt (&olist, g);
 8712 gimple_seq_add_seq (&olist, clist);
 8713 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
 8714 g = gimple_build_call (fndecl, 0);
 8715 gimple_seq_add_stmt (&olist, g);
 8718 block = make_node (BLOCK);
 8719 new_stmt = gimple_build_bind (NULL, NULL, block);
 8720 gsi_replace (gsi_p, new_stmt, true);
 8722 pop_gimplify_context (new_stmt);
 8723 gimple_bind_append_vars (new_stmt, ctx->block_vars);
 8724 BLOCK_VARS (block) = gimple_bind_vars (bind);
 8725 if (BLOCK_VARS (block))
 8726 TREE_USED (block) = 1;
 /* Assemble the final body: input-clause setup, the sections statement,
 the section switch, the lowered bodies, continue/return markers and
 the various cleanup sequences.  */
 8728 new_body = NULL;
 8729 gimple_seq_add_seq (&new_body, ilist);
 8730 gimple_seq_add_stmt (&new_body, stmt);
 8731 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
 8732 gimple_seq_add_stmt (&new_body, bind);
 8734 t = gimple_build_omp_continue (control, control);
 8735 gimple_seq_add_stmt (&new_body, t);
 8737 gimple_seq_add_seq (&new_body, olist);
 8738 if (ctx->cancellable)
 8739 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
 8740 gimple_seq_add_seq (&new_body, dlist);
 8742 new_body = maybe_catch_exception (new_body);
 8744 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
 8745 OMP_CLAUSE_NOWAIT) != NULL_TREE;
 8746 t = gimple_build_omp_return (nowait);
 8747 gimple_seq_add_stmt (&new_body, t);
 8748 gimple_seq_add_seq (&new_body, tred_dlist);
 8749 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
 8751 if (rclauses)
 8752 OMP_CLAUSE_DECL (rclauses) = rtmp;
 8754 gimple_bind_set_body (new_stmt, new_body);
 8758 /* A subroutine of lower_omp_single. Expand the simple form of
 8759 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
 8761 if (GOMP_single_start ())
 8762 BODY;
 8763 [ GOMP_barrier (); ] -> unless 'nowait' is present.
 8765 FIXME. It may be better to delay expanding the logic of this until
 8766 pass_expand_omp. The expanded logic may make the job more difficult
 8767 to a synchronization analysis pass. */
 8769 static void
 8770 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
 8772 location_t loc = gimple_location (single_stmt);
 8773 tree tlabel = create_artificial_label (loc);
 8774 tree flabel = create_artificial_label (loc);
 8775 gimple *call, *cond;
 8776 tree lhs, decl;
 /* GOMP_single_start returns true in exactly one thread.  */
 8778 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
 8779 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
 8780 call = gimple_build_call (decl, 0);
 8781 gimple_call_set_lhs (call, lhs);
 8782 gimple_seq_add_stmt (pre_p, call);
 /* Only the winning thread falls through into BODY; all others jump
 straight to FLABEL.  */
 8784 cond = gimple_build_cond (EQ_EXPR, lhs,
 8785 fold_convert_loc (loc, TREE_TYPE (lhs),
 8786 boolean_true_node),
 8787 tlabel, flabel);
 8788 gimple_seq_add_stmt (pre_p, cond);
 8789 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
 8790 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
 8791 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
 8795 /* A subroutine of lower_omp_single. Expand the simple form of
 8796 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
 8798 #pragma omp single copyprivate (a, b, c)
 8800 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
 8803 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
 8805 BODY;
 8806 copyout.a = a;
 8807 copyout.b = b;
 8808 copyout.c = c;
 8809 GOMP_single_copy_end (&copyout);
 8811 else
 8813 a = copyout_p->a;
 8814 b = copyout_p->b;
 8815 c = copyout_p->c;
 8817 GOMP_barrier ();
 8820 FIXME. It may be better to delay expanding the logic of this until
 8821 pass_expand_omp. The expanded logic may make the job more difficult
 8822 to a synchronization analysis pass. */
 8824 static void
 8825 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
 8826 omp_context *ctx)
 8828 tree ptr_type, t, l0, l1, l2, bfn_decl;
 8829 gimple_seq copyin_seq;
 8830 location_t loc = gimple_location (single_stmt);
 /* The sender holds the executing thread's copies; the receiver is the
 pointer other threads obtain from GOMP_single_copy_start.  */
 8832 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
 8834 ptr_type = build_pointer_type (ctx->record_type);
 8835 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
 8837 l0 = create_artificial_label (loc);
 8838 l1 = create_artificial_label (loc);
 8839 l2 = create_artificial_label (loc);
 8841 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
 8842 t = build_call_expr_loc (loc, bfn_decl, 0);
 8843 t = fold_convert_loc (loc, ptr_type, t);
 8844 gimplify_assign (ctx->receiver_decl, t, pre_p);
 /* A NULL receiver means this thread executes the single region (L0);
 otherwise it copies the broadcast values in (L1).  */
 8846 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
 8847 build_int_cst (ptr_type, 0));
 8848 t = build3 (COND_EXPR, void_type_node, t,
 8849 build_and_jump (&l0), build_and_jump (&l1));
 8850 gimplify_and_add (t, pre_p);
 8852 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
 8854 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
 8856 copyin_seq = NULL;
 8857 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
 8858 &copyin_seq, ctx);
 /* Publish the executing thread's copies to the other threads.  */
 8860 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
 8861 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
 8862 t = build_call_expr_loc (loc, bfn_decl, 1, t);
 8863 gimplify_and_add (t, pre_p);
 8865 t = build_and_jump (&l2);
 8866 gimplify_and_add (t, pre_p);
 8868 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
 8870 gimple_seq_add_seq (pre_p, copyin_seq);
 8872 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
 8876 /* Expand code for an OpenMP single directive. */
 8878 static void
 8879 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
 8881 tree block;
 8882 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
 8883 gbind *bind;
 8884 gimple_seq bind_body, bind_body_tail = NULL, dlist;
 8886 push_gimplify_context ();
 8888 block = make_node (BLOCK);
 8889 bind = gimple_build_bind (NULL, NULL, block);
 8890 gsi_replace (gsi_p, bind, true);
 8891 bind_body = NULL;
 8892 dlist = NULL;
 8893 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
 8894 &bind_body, &dlist, ctx, NULL);
 8895 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
 8897 gimple_seq_add_stmt (&bind_body, single_stmt);
 /* A non-NULL record_type means a copyprivate clause was present and
 the broadcast form is needed; otherwise use the simple form.  */
 8899 if (ctx->record_type)
 8900 lower_omp_single_copy (single_stmt, &bind_body, ctx);
 8901 else
 8902 lower_omp_single_simple (single_stmt, &bind_body);
 8904 gimple_omp_set_body (single_stmt, NULL);
 8906 gimple_seq_add_seq (&bind_body, dlist);
 8908 bind_body = maybe_catch_exception (bind_body);
 8910 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
 8911 OMP_CLAUSE_NOWAIT) != NULL_TREE;
 8912 gimple *g = gimple_build_omp_return (nowait);
 8913 gimple_seq_add_stmt (&bind_body_tail, g);
 8914 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
 /* Clobber the copy-out record once the construct is done so its
 storage can be reused.  */
 8915 if (ctx->record_type)
 8917 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
 8918 tree clobber = build_clobber (ctx->record_type);
 8919 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
 8920 clobber), GSI_SAME_STMT);
 8922 gimple_seq_add_seq (&bind_body, bind_body_tail);
 8923 gimple_bind_set_body (bind, bind_body);
 8925 pop_gimplify_context (bind);
 8927 gimple_bind_append_vars (bind, ctx->block_vars);
 8928 BLOCK_VARS (block) = ctx->block_vars;
 8929 if (BLOCK_VARS (block))
 8930 TREE_USED (block) = 1;
 8934 /* Lower code for an OMP scope directive. */
 8936 static void
 8937 lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
 8939 tree block;
 8940 gimple *scope_stmt = gsi_stmt (*gsi_p);
 8941 gbind *bind;
 8942 gimple_seq bind_body, bind_body_tail = NULL, dlist;
 8943 gimple_seq tred_dlist = NULL;
 8945 push_gimplify_context ();
 8947 block = make_node (BLOCK);
 8948 bind = gimple_build_bind (NULL, NULL, block);
 8949 gsi_replace (gsi_p, bind, true);
 8950 bind_body = NULL;
 8951 dlist = NULL;
 /* Task reductions on scope need a _REDUCTEMP_ clause and a call to
 GOMP_scope_start with the bookkeeping pointer.  */
 8953 tree rclauses
 8954 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
 8955 OMP_SCOPE, OMP_CLAUSE_REDUCTION);
 8956 if (rclauses)
 8958 tree type = build_pointer_type (pointer_sized_int_node);
 8959 tree temp = create_tmp_var (type);
 8960 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
 8961 OMP_CLAUSE_DECL (c) = temp;
 8962 OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
 8963 gimple_omp_scope_set_clauses (scope_stmt, c);
 8964 lower_omp_task_reductions (ctx, OMP_SCOPE,
 8965 gimple_omp_scope_clauses (scope_stmt),
 8966 &bind_body, &tred_dlist);
 8967 rclauses = c;
 8968 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
 8969 gimple *stmt = gimple_build_call (fndecl, 1, temp);
 8970 gimple_seq_add_stmt (&bind_body, stmt);
 8973 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
 8974 &bind_body, &dlist, ctx, NULL);
 8975 lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);
 8977 gimple_seq_add_stmt (&bind_body, scope_stmt);
 8979 gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));
 8981 gimple_omp_set_body (scope_stmt, NULL);
 8983 gimple_seq clist = NULL;
 8984 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
 8985 &bind_body, &clist, ctx);
 /* Reduction statements that must run atomically (CLIST) are wrapped in
 GOMP_atomic_start/GOMP_atomic_end calls.  */
 8986 if (clist)
 8988 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
 8989 gcall *g = gimple_build_call (fndecl, 0);
 8990 gimple_seq_add_stmt (&bind_body, g);
 8991 gimple_seq_add_seq (&bind_body, clist);
 8992 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
 8993 g = gimple_build_call (fndecl, 0);
 8994 gimple_seq_add_stmt (&bind_body, g);
 8997 gimple_seq_add_seq (&bind_body, dlist);
 8999 bind_body = maybe_catch_exception (bind_body);
 9001 bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
 9002 OMP_CLAUSE_NOWAIT) != NULL_TREE;
 9003 gimple *g = gimple_build_omp_return (nowait);
 9004 gimple_seq_add_stmt (&bind_body_tail, g);
 9005 gimple_seq_add_seq (&bind_body_tail, tred_dlist);
 9006 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
 /* Clobber the sender record once the construct is done so its storage
 can be reused.  */
 9007 if (ctx->record_type)
 9009 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
 9010 tree clobber = build_clobber (ctx->record_type);
 9011 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
 9012 clobber), GSI_SAME_STMT);
 9014 gimple_seq_add_seq (&bind_body, bind_body_tail);
 9016 gimple_bind_set_body (bind, bind_body);
 9018 pop_gimplify_context (bind);
 9020 gimple_bind_append_vars (bind, ctx->block_vars);
 9021 BLOCK_VARS (block) = ctx->block_vars;
 9022 if (BLOCK_VARS (block))
 9023 TREE_USED (block) = 1;
 9025 /* Expand code for an OpenMP master or masked directive. */
 9027 static void
 9028 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
 9030 tree block, lab = NULL, x, bfn_decl;
 9031 gimple *stmt = gsi_stmt (*gsi_p);
 9032 gbind *bind;
 9033 location_t loc = gimple_location (stmt);
 9034 gimple_seq tseq;
 /* For 'master' the executing thread is always thread 0; 'masked' may
 select another thread via its filter clause.  */
 9035 tree filter = integer_zero_node;
 9037 push_gimplify_context ();
 9039 if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
 9041 filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
 9042 OMP_CLAUSE_FILTER);
 9043 if (filter)
 9044 filter = fold_convert (integer_type_node,
 9045 OMP_CLAUSE_FILTER_EXPR (filter));
 9046 else
 9047 filter = integer_zero_node;
 9049 block = make_node (BLOCK);
 9050 bind = gimple_build_bind (NULL, NULL, block);
 9051 gsi_replace (gsi_p, bind, true);
 9052 gimple_bind_add_stmt (bind, stmt);
 /* Guard the body: threads whose number differs from FILTER jump past
 it to LAB.  */
 9054 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
 9055 x = build_call_expr_loc (loc, bfn_decl, 0);
 9056 x = build2 (EQ_EXPR, boolean_type_node, x, filter);
 9057 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
 9058 tseq = NULL;
 9059 gimplify_and_add (x, &tseq);
 9060 gimple_bind_add_seq (bind, tseq);
 9062 lower_omp (gimple_omp_body_ptr (stmt), ctx);
 9063 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
 9064 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
 9065 gimple_omp_set_body (stmt, NULL);
 9067 gimple_bind_add_stmt (bind, gimple_build_label (lab));
 /* master/masked have no implicit barrier, hence the nowait return.  */
 9069 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
 9071 pop_gimplify_context (bind);
 9073 gimple_bind_append_vars (bind, ctx->block_vars);
 9074 BLOCK_VARS (block) = ctx->block_vars;
 9077 /* Helper function for lower_omp_task_reductions. For a specific PASS
 9078 find the next clause that should be processed, or return false
 9079 if all have been processed already. */
 9081 static inline bool
 9082 omp_task_reduction_iterate (int pass, enum tree_code code,
 9083 enum omp_clause_code ccode, tree *c, tree *decl,
 9084 tree *type, tree *next)
 9086 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
 /* Plain reduction clauses without the 'task' modifier are skipped
 unless the construct is a taskloop.  */
 9088 if (ccode == OMP_CLAUSE_REDUCTION
 9089 && code != OMP_TASKLOOP
 9090 && !OMP_CLAUSE_REDUCTION_TASK (*c))
 9091 continue;
 9092 *decl = OMP_CLAUSE_DECL (*c);
 9093 *type = TREE_TYPE (*decl);
 /* MEM_REF decls (array sections) are variable-sized and handled in
 pass 1; fixed-size decls in pass 0.  */
 9094 if (TREE_CODE (*decl) == MEM_REF)
 9096 if (pass != 1)
 9097 continue;
 9099 else
 9101 if (omp_privatize_by_reference (*decl))
 9102 *type = TREE_TYPE (*type);
 9103 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
 9104 continue;
 9106 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
 9107 return true;
 /* Exhausted the clause chain: clear the outputs.  */
 9109 *decl = NULL_TREE;
 9110 *type = NULL_TREE;
 9111 *next = NULL_TREE;
 9112 return false;
9115 /* Lower task_reduction and reduction clauses (the latter unless CODE is
9116 OMP_TASKGROUP only with task modifier). Register mapping of those in
9117 START sequence and reducing them and unregister them in the END sequence. */
9119 static void
9120 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
9121 gimple_seq *start, gimple_seq *end)
9123 enum omp_clause_code ccode
9124 = (code == OMP_TASKGROUP
9125 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
9126 tree cancellable = NULL_TREE;
9127 clauses = omp_task_reductions_find_first (clauses, code, ccode);
9128 if (clauses == NULL_TREE)
9129 return;
9130 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9132 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
9133 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
9134 && outer->cancellable)
9136 cancellable = error_mark_node;
9137 break;
9139 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
9140 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
9141 break;
9143 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
9144 tree *last = &TYPE_FIELDS (record_type);
9145 unsigned cnt = 0;
9146 if (cancellable)
9148 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9149 ptr_type_node);
9150 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9151 integer_type_node);
9152 *last = field;
9153 DECL_CHAIN (field) = ifield;
9154 last = &DECL_CHAIN (ifield);
9155 DECL_CONTEXT (field) = record_type;
9156 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9157 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9158 DECL_CONTEXT (ifield) = record_type;
9159 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
9160 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
9162 for (int pass = 0; pass < 2; pass++)
9164 tree decl, type, next;
9165 for (tree c = clauses;
9166 omp_task_reduction_iterate (pass, code, ccode,
9167 &c, &decl, &type, &next); c = next)
9169 ++cnt;
9170 tree new_type = type;
9171 if (ctx->outer)
9172 new_type = remap_type (type, &ctx->outer->cb);
9173 tree field
9174 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
9175 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
9176 new_type);
9177 if (DECL_P (decl) && type == TREE_TYPE (decl))
9179 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
9180 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
9181 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
9183 else
9184 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
9185 DECL_CONTEXT (field) = record_type;
9186 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9187 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9188 *last = field;
9189 last = &DECL_CHAIN (field);
9190 tree bfield
9191 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
9192 boolean_type_node);
9193 DECL_CONTEXT (bfield) = record_type;
9194 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
9195 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
9196 *last = bfield;
9197 last = &DECL_CHAIN (bfield);
9200 *last = NULL_TREE;
9201 layout_type (record_type);
9203 /* Build up an array which registers with the runtime all the reductions
9204 and deregisters them at the end. Format documented in libgomp/task.c. */
9205 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
9206 tree avar = create_tmp_var_raw (atype);
9207 gimple_add_tmp_var (avar);
9208 TREE_ADDRESSABLE (avar) = 1;
9209 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
9210 NULL_TREE, NULL_TREE);
9211 tree t = build_int_cst (pointer_sized_int_node, cnt);
9212 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9213 gimple_seq seq = NULL;
9214 tree sz = fold_convert (pointer_sized_int_node,
9215 TYPE_SIZE_UNIT (record_type));
9216 int cachesz = 64;
9217 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
9218 build_int_cst (pointer_sized_int_node, cachesz - 1));
9219 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
9220 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
9221 ctx->task_reductions.create (1 + cnt);
9222 ctx->task_reduction_map = new hash_map<tree, unsigned>;
9223 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
9224 ? sz : NULL_TREE);
9225 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
9226 gimple_seq_add_seq (start, seq);
9227 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
9228 NULL_TREE, NULL_TREE);
9229 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
9230 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9231 NULL_TREE, NULL_TREE);
9232 t = build_int_cst (pointer_sized_int_node,
9233 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
9234 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9235 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
9236 NULL_TREE, NULL_TREE);
9237 t = build_int_cst (pointer_sized_int_node, -1);
9238 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9239 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
9240 NULL_TREE, NULL_TREE);
9241 t = build_int_cst (pointer_sized_int_node, 0);
9242 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9244 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
9245 and for each task reduction checks a bool right after the private variable
9246 within that thread's chunk; if the bool is clear, it hasn't been
9247 initialized and thus isn't going to be reduced nor destructed, otherwise
9248 reduce and destruct it. */
9249 tree idx = create_tmp_var (size_type_node);
9250 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
9251 tree num_thr_sz = create_tmp_var (size_type_node);
9252 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9253 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9254 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
9255 gimple *g;
9256 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9258 /* For worksharing constructs or scope, only perform it in the master
9259 thread, with the exception of cancelled implicit barriers - then only
9260 handle the current thread. */
9261 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9262 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9263 tree thr_num = create_tmp_var (integer_type_node);
9264 g = gimple_build_call (t, 0);
9265 gimple_call_set_lhs (g, thr_num);
9266 gimple_seq_add_stmt (end, g);
9267 if (cancellable)
9269 tree c;
9270 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9271 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9272 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9273 if (code == OMP_FOR)
9274 c = gimple_omp_for_clauses (ctx->stmt);
9275 else if (code == OMP_SECTIONS)
9276 c = gimple_omp_sections_clauses (ctx->stmt);
9277 else /* if (code == OMP_SCOPE) */
9278 c = gimple_omp_scope_clauses (ctx->stmt);
9279 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
9280 cancellable = c;
9281 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
9282 lab5, lab6);
9283 gimple_seq_add_stmt (end, g);
9284 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9285 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
9286 gimple_seq_add_stmt (end, g);
9287 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
9288 build_one_cst (TREE_TYPE (idx)));
9289 gimple_seq_add_stmt (end, g);
9290 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
9291 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9293 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
9294 gimple_seq_add_stmt (end, g);
9295 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9297 if (code != OMP_PARALLEL)
9299 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9300 tree num_thr = create_tmp_var (integer_type_node);
9301 g = gimple_build_call (t, 0);
9302 gimple_call_set_lhs (g, num_thr);
9303 gimple_seq_add_stmt (end, g);
9304 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
9305 gimple_seq_add_stmt (end, g);
9306 if (cancellable)
9307 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9309 else
9311 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
9312 OMP_CLAUSE__REDUCTEMP_);
9313 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
9314 t = fold_convert (size_type_node, t);
9315 gimplify_assign (num_thr_sz, t, end);
9317 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9318 NULL_TREE, NULL_TREE);
9319 tree data = create_tmp_var (pointer_sized_int_node);
9320 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
9321 if (code == OMP_TASKLOOP)
9323 lab7 = create_artificial_label (UNKNOWN_LOCATION);
9324 g = gimple_build_cond (NE_EXPR, data,
9325 build_zero_cst (pointer_sized_int_node),
9326 lab1, lab7);
9327 gimple_seq_add_stmt (end, g);
9329 gimple_seq_add_stmt (end, gimple_build_label (lab1));
9330 tree ptr;
9331 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
9332 ptr = create_tmp_var (build_pointer_type (record_type));
9333 else
9334 ptr = create_tmp_var (ptr_type_node);
9335 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
9337 tree field = TYPE_FIELDS (record_type);
9338 cnt = 0;
9339 if (cancellable)
9340 field = DECL_CHAIN (DECL_CHAIN (field));
9341 for (int pass = 0; pass < 2; pass++)
9343 tree decl, type, next;
9344 for (tree c = clauses;
9345 omp_task_reduction_iterate (pass, code, ccode,
9346 &c, &decl, &type, &next); c = next)
9348 tree var = decl, ref;
9349 if (TREE_CODE (decl) == MEM_REF)
9351 var = TREE_OPERAND (var, 0);
9352 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
9353 var = TREE_OPERAND (var, 0);
9354 tree v = var;
9355 if (TREE_CODE (var) == ADDR_EXPR)
9356 var = TREE_OPERAND (var, 0);
9357 else if (INDIRECT_REF_P (var))
9358 var = TREE_OPERAND (var, 0);
9359 tree orig_var = var;
9360 if (is_variable_sized (var))
9362 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
9363 var = DECL_VALUE_EXPR (var);
9364 gcc_assert (INDIRECT_REF_P (var));
9365 var = TREE_OPERAND (var, 0);
9366 gcc_assert (DECL_P (var));
9368 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9369 if (orig_var != var)
9370 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
9371 else if (TREE_CODE (v) == ADDR_EXPR)
9372 t = build_fold_addr_expr (t);
9373 else if (INDIRECT_REF_P (v))
9374 t = build_fold_indirect_ref (t);
9375 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
9377 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
9378 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
9379 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
9381 if (!integer_zerop (TREE_OPERAND (decl, 1)))
9382 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
9383 fold_convert (size_type_node,
9384 TREE_OPERAND (decl, 1)));
9386 else
9388 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9389 if (!omp_privatize_by_reference (decl))
9390 t = build_fold_addr_expr (t);
9392 t = fold_convert (pointer_sized_int_node, t);
9393 seq = NULL;
9394 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9395 gimple_seq_add_seq (start, seq);
9396 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9397 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9398 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9399 t = unshare_expr (byte_position (field));
9400 t = fold_convert (pointer_sized_int_node, t);
9401 ctx->task_reduction_map->put (c, cnt);
9402 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
9403 ? t : NULL_TREE);
9404 seq = NULL;
9405 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9406 gimple_seq_add_seq (start, seq);
9407 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9408 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
9409 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9411 tree bfield = DECL_CHAIN (field);
9412 tree cond;
9413 if (code == OMP_PARALLEL
9414 || code == OMP_FOR
9415 || code == OMP_SECTIONS
9416 || code == OMP_SCOPE)
9417 /* In parallel, worksharing or scope all threads unconditionally
9418 initialize all their task reduction private variables. */
9419 cond = boolean_true_node;
9420 else if (TREE_TYPE (ptr) == ptr_type_node)
9422 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9423 unshare_expr (byte_position (bfield)));
9424 seq = NULL;
9425 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
9426 gimple_seq_add_seq (end, seq);
9427 tree pbool = build_pointer_type (TREE_TYPE (bfield));
9428 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
9429 build_int_cst (pbool, 0));
9431 else
9432 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
9433 build_simple_mem_ref (ptr), bfield, NULL_TREE);
9434 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9435 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9436 tree condv = create_tmp_var (boolean_type_node);
9437 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
9438 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
9439 lab3, lab4);
9440 gimple_seq_add_stmt (end, g);
9441 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9442 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
9444 /* If this reduction doesn't need destruction and parallel
9445 has been cancelled, there is nothing to do for this
9446 reduction, so jump around the merge operation. */
9447 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9448 g = gimple_build_cond (NE_EXPR, cancellable,
9449 build_zero_cst (TREE_TYPE (cancellable)),
9450 lab4, lab5);
9451 gimple_seq_add_stmt (end, g);
9452 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9455 tree new_var;
9456 if (TREE_TYPE (ptr) == ptr_type_node)
9458 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9459 unshare_expr (byte_position (field)));
9460 seq = NULL;
9461 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9462 gimple_seq_add_seq (end, seq);
9463 tree pbool = build_pointer_type (TREE_TYPE (field));
9464 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9465 build_int_cst (pbool, 0));
9467 else
9468 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9469 build_simple_mem_ref (ptr), field, NULL_TREE);
9471 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9472 if (TREE_CODE (decl) != MEM_REF
9473 && omp_privatize_by_reference (decl))
9474 ref = build_simple_mem_ref (ref);
9475 /* reduction(-:var) sums up the partial results, so it acts
9476 identically to reduction(+:var). */
9477 if (rcode == MINUS_EXPR)
9478 rcode = PLUS_EXPR;
9479 if (TREE_CODE (decl) == MEM_REF)
9481 tree type = TREE_TYPE (new_var);
9482 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9483 tree i = create_tmp_var (TREE_TYPE (v));
9484 tree ptype = build_pointer_type (TREE_TYPE (type));
9485 if (DECL_P (v))
9487 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9488 tree vv = create_tmp_var (TREE_TYPE (v));
9489 gimplify_assign (vv, v, start);
9490 v = vv;
9492 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9493 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9494 new_var = build_fold_addr_expr (new_var);
9495 new_var = fold_convert (ptype, new_var);
9496 ref = fold_convert (ptype, ref);
9497 tree m = create_tmp_var (ptype);
9498 gimplify_assign (m, new_var, end);
9499 new_var = m;
9500 m = create_tmp_var (ptype);
9501 gimplify_assign (m, ref, end);
9502 ref = m;
9503 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9504 tree body = create_artificial_label (UNKNOWN_LOCATION);
9505 tree endl = create_artificial_label (UNKNOWN_LOCATION);
9506 gimple_seq_add_stmt (end, gimple_build_label (body));
9507 tree priv = build_simple_mem_ref (new_var);
9508 tree out = build_simple_mem_ref (ref);
9509 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9511 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9512 tree decl_placeholder
9513 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9514 tree lab6 = NULL_TREE;
9515 if (cancellable)
9517 /* If this reduction needs destruction and parallel
9518 has been cancelled, jump around the merge operation
9519 to the destruction. */
9520 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9521 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9522 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9523 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9524 lab6, lab5);
9525 gimple_seq_add_stmt (end, g);
9526 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9528 SET_DECL_VALUE_EXPR (placeholder, out);
9529 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9530 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9531 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9532 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9533 gimple_seq_add_seq (end,
9534 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9535 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9536 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9538 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9539 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9541 if (cancellable)
9542 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9543 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9544 if (x)
9546 gimple_seq tseq = NULL;
9547 gimplify_stmt (&x, &tseq);
9548 gimple_seq_add_seq (end, tseq);
9551 else
9553 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9554 out = unshare_expr (out);
9555 gimplify_assign (out, x, end);
9557 gimple *g
9558 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9559 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9560 gimple_seq_add_stmt (end, g);
9561 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9562 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9563 gimple_seq_add_stmt (end, g);
9564 g = gimple_build_assign (i, PLUS_EXPR, i,
9565 build_int_cst (TREE_TYPE (i), 1));
9566 gimple_seq_add_stmt (end, g);
9567 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9568 gimple_seq_add_stmt (end, g);
9569 gimple_seq_add_stmt (end, gimple_build_label (endl));
9571 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9573 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9574 tree oldv = NULL_TREE;
9575 tree lab6 = NULL_TREE;
9576 if (cancellable)
9578 /* If this reduction needs destruction and parallel
9579 has been cancelled, jump around the merge operation
9580 to the destruction. */
9581 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9582 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9583 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9584 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9585 lab6, lab5);
9586 gimple_seq_add_stmt (end, g);
9587 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9589 if (omp_privatize_by_reference (decl)
9590 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9591 TREE_TYPE (ref)))
9592 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9593 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9594 tree refv = create_tmp_var (TREE_TYPE (ref));
9595 gimplify_assign (refv, ref, end);
9596 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9597 SET_DECL_VALUE_EXPR (placeholder, ref);
9598 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9599 tree d = maybe_lookup_decl (decl, ctx);
9600 gcc_assert (d);
9601 if (DECL_HAS_VALUE_EXPR_P (d))
9602 oldv = DECL_VALUE_EXPR (d);
9603 if (omp_privatize_by_reference (var))
9605 tree v = fold_convert (TREE_TYPE (d),
9606 build_fold_addr_expr (new_var));
9607 SET_DECL_VALUE_EXPR (d, v);
9609 else
9610 SET_DECL_VALUE_EXPR (d, new_var);
9611 DECL_HAS_VALUE_EXPR_P (d) = 1;
9612 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9613 if (oldv)
9614 SET_DECL_VALUE_EXPR (d, oldv);
9615 else
9617 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9618 DECL_HAS_VALUE_EXPR_P (d) = 0;
9620 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9621 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9622 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9623 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9624 if (cancellable)
9625 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9626 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9627 if (x)
9629 gimple_seq tseq = NULL;
9630 gimplify_stmt (&x, &tseq);
9631 gimple_seq_add_seq (end, tseq);
9634 else
9636 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9637 ref = unshare_expr (ref);
9638 gimplify_assign (ref, x, end);
9640 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9641 ++cnt;
9642 field = DECL_CHAIN (bfield);
9646 if (code == OMP_TASKGROUP)
9648 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9649 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9650 gimple_seq_add_stmt (start, g);
9652 else
9654 tree c;
9655 if (code == OMP_FOR)
9656 c = gimple_omp_for_clauses (ctx->stmt);
9657 else if (code == OMP_SECTIONS)
9658 c = gimple_omp_sections_clauses (ctx->stmt);
9659 else if (code == OMP_SCOPE)
9660 c = gimple_omp_scope_clauses (ctx->stmt);
9661 else
9662 c = gimple_omp_taskreg_clauses (ctx->stmt);
9663 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9664 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9665 build_fold_addr_expr (avar));
9666 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9669 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9670 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9671 size_one_node));
9672 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9673 gimple_seq_add_stmt (end, g);
9674 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9675 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9677 enum built_in_function bfn
9678 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9679 t = builtin_decl_explicit (bfn);
9680 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9681 tree arg;
9682 if (cancellable)
9684 arg = create_tmp_var (c_bool_type);
9685 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9686 cancellable));
9688 else
9689 arg = build_int_cst (c_bool_type, 0);
9690 g = gimple_build_call (t, 1, arg);
9692 else
9694 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9695 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9697 gimple_seq_add_stmt (end, g);
9698 if (lab7)
9699 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9700 t = build_constructor (atype, NULL);
9701 TREE_THIS_VOLATILE (t) = 1;
9702 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9705 /* Expand code for an OpenMP taskgroup directive. */
9707 static void
9708 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9710 gimple *stmt = gsi_stmt (*gsi_p);
9711 gcall *x;
9712 gbind *bind;
9713 gimple_seq dseq = NULL;
9714 tree block = make_node (BLOCK);
9716 bind = gimple_build_bind (NULL, NULL, block);
9717 gsi_replace (gsi_p, bind, true);
9718 gimple_bind_add_stmt (bind, stmt);
9720 push_gimplify_context ();
9722 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9724 gimple_bind_add_stmt (bind, x);
9726 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9727 gimple_omp_taskgroup_clauses (stmt),
9728 gimple_bind_body_ptr (bind), &dseq);
9730 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9731 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9732 gimple_omp_set_body (stmt, NULL);
9734 gimple_bind_add_seq (bind, dseq);
9736 pop_gimplify_context (bind);
9738 gimple_bind_append_vars (bind, ctx->block_vars);
9739 BLOCK_VARS (block) = ctx->block_vars;
/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.

   GSI_P points at the standalone GIMPLE_OMP_ORDERED statement ORD_STMT
   and CTX is its context; the enclosing context must be the
   GIMPLE_OMP_FOR the doacross clauses refer to.  First, directly
   adjacent #pragma omp ordered depend(sink:...) constructs are merged
   into ORD_STMT so they can be folded together; then all sink vectors
   are canonicalized into one folded clause (algorithm described in the
   big comment below).  Redundant or invalid clauses are removed, and if
   nothing remains the whole statement is replaced by a GIMPLE_NOP.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  /* Only meaningful when nested directly in an OpenMP for.  */
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
      && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  /* Debug stmts and nops between the ordered constructs don't
	     break adjacency; skip over them.  */
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS
	      || OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
	    break;
	  /* Splice the neighbor's clause chain onto the end of
	     ORD_STMT's and delete the now-empty neighbor.  */
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
   */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  /* folded_deps[0 .. len-1] is the folded vector being built;
     folded_deps[len .. 2*len-2] temporarily holds the current clause's
     remaining dimensions while deciding which vector is lexically
     later.  */
  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();

  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS);
      if (OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
	goto next_ordered_clause;

      /* Walk the clause's sink vector: a TREE_LIST whose PURPOSE is the
	 offset and whose VALUE is the iteration variable.  */
      tree vec;
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p (wi::abs (offset),
				  wi::abs (wi::to_wide (fd.loops[i].step)),
				  UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
			  "ignoring sink clause with offset that is not "
			  "a multiple of the loop step");
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations");
		      goto lower_omp_ordered_ret;
		    }
		  /* Canonicalize so folded_deps[0] is always positive;
		     NEG_OFFSET_P records whether to negate it back at
		     the end.  */
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (folded_deps[0],
					      offset, UNSIGNED);
		}
	    }
	  /* Calculate minimum for the remaining dimensions.  */
	  else
	    {
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (folded_deps[i], offset))
		{
		  if (forward ^ wi::gts_p (folded_deps[i], offset))
		    {
		      /* The current clause is lexically later than the
			 best so far: adopt its tail as the new fold.  */
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      /* The clause has been folded into FOLDED_DEP; drop it from the
	 chain (FOLDED_DEP itself is re-attached at the end).  */
      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (folded_dep)
    {
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      /* Store the folded first dimension back into the surviving
	 clause and make it the head of the clause chain.  */
      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp ordered threads, while we
     want a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
/* Expand code for an OpenMP ordered directive.

   The block-form GIMPLE_OMP_ORDERED at *GSI_P is wrapped in a
   GIMPLE_BIND that brackets the lowered body with either the
   GOMP_ordered_start/GOMP_ordered_end runtime calls or, inside a simd
   loop, the GOMP_SIMD_ORDERED_{START,END} internal functions.  When the
   construct may end up in SIMT code on an offload target, the body is
   additionally wrapped in a loop that lets the SIMT lanes execute it
   one at a time (see MAYBE_SIMT below).  Standalone (doacross) ordered
   statements are left untouched here and handled at expansion time.  */

static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
			       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (gimple_omp_ordered_standalone_p (ord_stmt))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      /* THREADS is passed to the internal fn as a constant so the
	 vectorizer can tell ordered threads from ordered simd.  */
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      /* Emit the head of a loop that runs the ordered body once per
	 SIMT lane, in lane order:
	   counter = GOMP_SIMT_LANE ();
	 body:
	   if (GOMP_SIMT_ORDERED_PRED (counter) == 0) goto t; else goto test;
	 t:
	   <ordered body>  */
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      /* Emit the loop latch: decrement COUNTER and loop back to BODY
	 while any lane still has a non-negative counter:
	 test:
	   counter = counter - 1;
	   nonneg = GOMP_SIMT_VOTE_ANY (counter >= 0);
	   if (nonneg != 0) goto body; else goto end;  */
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);
  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
10099 /* Expand code for an OpenMP scan directive and the structured block
10100 before the scan directive. */
10102 static void
10103 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10105 gimple *stmt = gsi_stmt (*gsi_p);
10106 bool has_clauses
10107 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
10108 tree lane = NULL_TREE;
10109 gimple_seq before = NULL;
10110 omp_context *octx = ctx->outer;
10111 gcc_assert (octx);
10112 if (octx->scan_exclusive && !has_clauses)
10114 gimple_stmt_iterator gsi2 = *gsi_p;
10115 gsi_next (&gsi2);
10116 gimple *stmt2 = gsi_stmt (gsi2);
10117 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
10118 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
10119 the one with exclusive clause(s), comes first. */
10120 if (stmt2
10121 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
10122 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
10124 gsi_remove (gsi_p, false);
10125 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
10126 ctx = maybe_lookup_ctx (stmt2);
10127 gcc_assert (ctx);
10128 lower_omp_scan (gsi_p, ctx);
10129 return;
10133 bool input_phase = has_clauses ^ octx->scan_inclusive;
10134 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10135 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
10136 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10137 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
10138 && !gimple_omp_for_combined_p (octx->stmt));
10139 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
10140 if (is_for_simd && octx->for_simd_scan_phase)
10141 is_simd = false;
10142 if (is_simd)
10143 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
10144 OMP_CLAUSE__SIMDUID_))
10146 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
10147 lane = create_tmp_var (unsigned_type_node);
10148 tree t = build_int_cst (integer_type_node,
10149 input_phase ? 1
10150 : octx->scan_inclusive ? 2 : 3);
10151 gimple *g
10152 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
10153 gimple_call_set_lhs (g, lane);
10154 gimple_seq_add_stmt (&before, g);
10157 if (is_simd || is_for)
10159 for (tree c = gimple_omp_for_clauses (octx->stmt);
10160 c; c = OMP_CLAUSE_CHAIN (c))
10161 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10162 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10164 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10165 tree var = OMP_CLAUSE_DECL (c);
10166 tree new_var = lookup_decl (var, octx);
10167 tree val = new_var;
10168 tree var2 = NULL_TREE;
10169 tree var3 = NULL_TREE;
10170 tree var4 = NULL_TREE;
10171 tree lane0 = NULL_TREE;
10172 tree new_vard = new_var;
10173 if (omp_privatize_by_reference (var))
10175 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10176 val = new_var;
10178 if (DECL_HAS_VALUE_EXPR_P (new_vard))
10180 val = DECL_VALUE_EXPR (new_vard);
10181 if (new_vard != new_var)
10183 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
10184 val = TREE_OPERAND (val, 0);
10186 if (TREE_CODE (val) == ARRAY_REF
10187 && VAR_P (TREE_OPERAND (val, 0)))
10189 tree v = TREE_OPERAND (val, 0);
10190 if (lookup_attribute ("omp simd array",
10191 DECL_ATTRIBUTES (v)))
10193 val = unshare_expr (val);
10194 lane0 = TREE_OPERAND (val, 1);
10195 TREE_OPERAND (val, 1) = lane;
10196 var2 = lookup_decl (v, octx);
10197 if (octx->scan_exclusive)
10198 var4 = lookup_decl (var2, octx);
10199 if (input_phase
10200 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10201 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
10202 if (!input_phase)
10204 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
10205 var2, lane, NULL_TREE, NULL_TREE);
10206 TREE_THIS_NOTRAP (var2) = 1;
10207 if (octx->scan_exclusive)
10209 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
10210 var4, lane, NULL_TREE,
10211 NULL_TREE);
10212 TREE_THIS_NOTRAP (var4) = 1;
10215 else
10216 var2 = val;
10219 gcc_assert (var2);
10221 else
10223 var2 = build_outer_var_ref (var, octx);
10224 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10226 var3 = maybe_lookup_decl (new_vard, octx);
10227 if (var3 == new_vard || var3 == NULL_TREE)
10228 var3 = NULL_TREE;
10229 else if (is_simd && octx->scan_exclusive && !input_phase)
10231 var4 = maybe_lookup_decl (var3, octx);
10232 if (var4 == var3 || var4 == NULL_TREE)
10234 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
10236 var4 = var3;
10237 var3 = NULL_TREE;
10239 else
10240 var4 = NULL_TREE;
10244 if (is_simd
10245 && octx->scan_exclusive
10246 && !input_phase
10247 && var4 == NULL_TREE)
10248 var4 = create_tmp_var (TREE_TYPE (val));
10250 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10252 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10253 if (input_phase)
10255 if (var3)
10257 /* If we've added a separate identity element
10258 variable, copy it over into val. */
10259 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
10260 var3);
10261 gimplify_and_add (x, &before);
10263 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10265 /* Otherwise, assign to it the identity element. */
10266 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10267 if (is_for)
10268 tseq = copy_gimple_seq_and_replace_locals (tseq);
10269 tree ref = build_outer_var_ref (var, octx);
10270 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10271 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10272 if (x)
10274 if (new_vard != new_var)
10275 val = build_fold_addr_expr_loc (clause_loc, val);
10276 SET_DECL_VALUE_EXPR (new_vard, val);
10278 SET_DECL_VALUE_EXPR (placeholder, ref);
10279 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10280 lower_omp (&tseq, octx);
10281 if (x)
10282 SET_DECL_VALUE_EXPR (new_vard, x);
10283 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10284 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10285 gimple_seq_add_seq (&before, tseq);
10286 if (is_simd)
10287 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10290 else if (is_simd)
10292 tree x;
10293 if (octx->scan_exclusive)
10295 tree v4 = unshare_expr (var4);
10296 tree v2 = unshare_expr (var2);
10297 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
10298 gimplify_and_add (x, &before);
10300 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10301 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10302 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10303 tree vexpr = val;
10304 if (x && new_vard != new_var)
10305 vexpr = build_fold_addr_expr_loc (clause_loc, val);
10306 if (x)
10307 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10308 SET_DECL_VALUE_EXPR (placeholder, var2);
10309 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10310 lower_omp (&tseq, octx);
10311 gimple_seq_add_seq (&before, tseq);
10312 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10313 if (x)
10314 SET_DECL_VALUE_EXPR (new_vard, x);
10315 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10316 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10317 if (octx->scan_inclusive)
10319 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10320 var2);
10321 gimplify_and_add (x, &before);
10323 else if (lane0 == NULL_TREE)
10325 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10326 var4);
10327 gimplify_and_add (x, &before);
10331 else
10333 if (input_phase)
10335 /* input phase. Set val to initializer before
10336 the body. */
10337 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
10338 gimplify_assign (val, x, &before);
10340 else if (is_simd)
10342 /* scan phase. */
10343 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10344 if (code == MINUS_EXPR)
10345 code = PLUS_EXPR;
10347 tree x = build2 (code, TREE_TYPE (var2),
10348 unshare_expr (var2), unshare_expr (val));
10349 if (octx->scan_inclusive)
10351 gimplify_assign (unshare_expr (var2), x, &before);
10352 gimplify_assign (val, var2, &before);
10354 else
10356 gimplify_assign (unshare_expr (var4),
10357 unshare_expr (var2), &before);
10358 gimplify_assign (var2, x, &before);
10359 if (lane0 == NULL_TREE)
10360 gimplify_assign (val, var4, &before);
10364 if (octx->scan_exclusive && !input_phase && lane0)
10366 tree vexpr = unshare_expr (var4);
10367 TREE_OPERAND (vexpr, 1) = lane0;
10368 if (new_vard != new_var)
10369 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
10370 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10374 if (is_simd && !is_for_simd)
10376 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
10377 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
10378 gsi_replace (gsi_p, gimple_build_nop (), true);
10379 return;
10381 lower_omp (gimple_omp_body_ptr (stmt), octx);
10382 if (before)
10384 gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (stmt));
10385 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
10390 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10391 substitution of a couple of function calls. But in the NAMED case,
10392 requires that languages coordinate a symbol name. It is therefore
10393 best put here in common code. */
/* Map from critical-section NAME identifiers to the lazily created global
   mutex VAR_DECLs, persisted across GC via GTY.  */
10395 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;

/* Lower the GIMPLE_OMP_CRITICAL at *GSI_P in context CTX: wrap its body in
   calls to GOMP_critical_{name_,}start/end inside a fresh GIMPLE_BIND that
   replaces the statement at *GSI_P.  */
10397 static void
10398 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10400   tree block;
10401   tree name, lock, unlock;
10402   gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10403   gbind *bind;
10404   location_t loc = gimple_location (stmt);
10405   gimple_seq tbody;
10407   name = gimple_omp_critical_name (stmt);
10408   if (name)
/* Named critical: all regions with the same name share one global mutex,
   looked up (or created once) in critical_name_mutexes.  */
10410       tree decl;
10412       if (!critical_name_mutexes)
10413 	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10415       tree *n = critical_name_mutexes->get (name);
10416       if (n == NULL)
10418 	  char *new_str;
10420 	  decl = create_tmp_var_raw (ptr_type_node);
/* The symbol name is coordinated with the front ends / libgomp:
   ".gomp_critical_user_<name>".  */
10422 	  new_str = ACONCAT ((".gomp_critical_user_",
10423 			      IDENTIFIER_POINTER (name), NULL));
10424 	  DECL_NAME (decl) = get_identifier (new_str);
/* Public + common so that multiple TUs using the same critical name
   agree on a single mutex object.  */
10425 	  TREE_PUBLIC (decl) = 1;
10426 	  TREE_STATIC (decl) = 1;
10427 	  DECL_COMMON (decl) = 1;
10428 	  DECL_ARTIFICIAL (decl) = 1;
10429 	  DECL_IGNORED_P (decl) = 1;
10431 	  varpool_node::finalize_decl (decl);
10433 	  critical_name_mutexes->put (name, decl);
10435       else
10436 	decl = *n;
10438       /* If '#pragma omp critical' is inside offloaded region or
10439 	 inside function marked as offloadable, the symbol must be
10440 	 marked as offloadable too.  */
10441       omp_context *octx;
10442       if (cgraph_node::get (current_function_decl)->offloadable)
10443 	varpool_node::get_create (decl)->offloadable = 1;
10444       else
10445 	for (octx = ctx->outer; octx; octx = octx->outer)
10446 	  if (is_gimple_omp_offloaded (octx->stmt))
10448 	      varpool_node::get_create (decl)->offloadable = 1;
10449 	      break;
/* Named variants take the address of the shared mutex variable.  */
10452       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10453       lock = build_call_expr_loc (loc, lock, 1,
10454 				  build_fold_addr_expr_loc (loc, decl));
10456       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10457       unlock = build_call_expr_loc (loc, unlock, 1,
10458 				    build_fold_addr_expr_loc (loc, decl));
10460   else
/* Unnamed critical: use the single implicit global mutex inside libgomp,
   so the calls take no arguments.  */
10462       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10463       lock = build_call_expr_loc (loc, lock, 0);
10465       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10466       unlock = build_call_expr_loc (loc, unlock, 0);
10469   push_gimplify_context ();
/* Build the replacement bind: lock call, lowered body (with exception
   cleanup so the lock is released on EH paths), unlock call, OMP return.  */
10471   block = make_node (BLOCK);
10472   bind = gimple_build_bind (NULL, NULL, block);
10473   gsi_replace (gsi_p, bind, true);
10474   gimple_bind_add_stmt (bind, stmt);
10476   tbody = gimple_bind_body (bind);
10477   gimplify_and_add (lock, &tbody);
10478   gimple_bind_set_body (bind, tbody);
10480   lower_omp (gimple_omp_body_ptr (stmt), ctx);
10481   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10482   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10483   gimple_omp_set_body (stmt, NULL);
10485   tbody = gimple_bind_body (bind);
10486   gimplify_and_add (unlock, &tbody);
10487   gimple_bind_set_body (bind, tbody);
10489   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10491   pop_gimplify_context (bind);
10492   gimple_bind_append_vars (bind, ctx->block_vars)
10493   BLOCK_VARS (block) = gimple_bind_vars (bind);
10496 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10497 for a lastprivate clause. Given a loop control predicate of (V
10498 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10499 is appended to *DLIST, iterator initialization is appended to
10500 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10501 to be emitted in a critical section. */
10503 static void
10504 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10505 			   gimple_seq *dlist, gimple_seq *clist,
10506 			   struct omp_context *ctx)
10508   tree clauses, cond, vinit;
10509   enum tree_code cond_code;
10510   gimple_seq stmts;
/* The lastprivate copy-out must run only on the "last iteration" side,
   i.e. when the loop predicate (V cond N2) has just become false, so
   negate the loop's condition code.  */
10512   cond_code = fd->loop.cond_code;
10513   cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10515   /* When possible, use a strict equality expression.  This can let VRP
10516      type optimizations deduce the value and remove a copy.  */
10517   if (tree_fits_shwi_p (fd->loop.step))
10519       HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10520       if (step == 1 || step == -1)
10521 	cond_code = EQ_EXPR;
10524   tree n2 = fd->loop.n2;
/* For collapsed loops combined into an outer construct, the real end
   value is not available here; recover it from the outer construct's
   _looptemp_ clause (or recompute it from the outer GIMPLE_OMP_FOR).  */
10525   if (fd->collapse > 1
10526       && TREE_CODE (n2) != INTEGER_CST
10527       && gimple_omp_for_combined_into_p (fd->for_stmt))
10529       struct omp_context *taskreg_ctx = NULL;
10530       if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10532 	  gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10533 	  if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10534 	      || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10536 	      if (gimple_omp_for_combined_into_p (gfor))
/* gfor is itself combined into an enclosing parallel; the looptemps
   live on that parallel.  */
10538 		  gcc_assert (ctx->outer->outer
10539 			      && is_parallel_ctx (ctx->outer->outer));
10540 		  taskreg_ctx = ctx->outer->outer;
10542 	      else
/* Outermost worksharing/distribute loop: just recompute N2.  */
10544 		  struct omp_for_data outer_fd;
10545 		  omp_extract_for_data (gfor, &outer_fd, NULL);
10546 		  n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10549 	  else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10550 	    taskreg_ctx = ctx->outer->outer;
10552       else if (is_taskreg_ctx (ctx->outer))
10553 	taskreg_ctx = ctx->outer;
10554       if (taskreg_ctx)
10556 	  int i;
10557 	  tree taskreg_clauses
10558 	    = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10559 	  tree innerc = omp_find_clause (taskreg_clauses,
10560 					 OMP_CLAUSE__LOOPTEMP_);
10561 	  gcc_assert (innerc);
/* Skip over the per-dimension looptemps (plus 4 extra ones used for
   signed non-rectangular double loops) to reach the slot that holds
   the precomputed end value.  */
10562 	  int count = fd->collapse;
10563 	  if (fd->non_rect
10564 	      && fd->last_nonrect == fd->first_nonrect + 1)
10565 	    if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10566 	      if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10567 		count += 4;
10568 	  for (i = 0; i < count; i++)
10570 	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10571 					OMP_CLAUSE__LOOPTEMP_);
10572 	      gcc_assert (innerc);
10574 	  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10575 				    OMP_CLAUSE__LOOPTEMP_);
10576 	  if (innerc)
10577 	    n2 = fold_convert (TREE_TYPE (n2),
10578 			       lookup_decl (OMP_CLAUSE_DECL (innerc),
10579 					    taskreg_ctx));
10582   cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10584   clauses = gimple_omp_for_clauses (fd->for_stmt);
10585   stmts = NULL;
10586   lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
/* Prepend the generated copy-out code to *DLIST so it runs before any
   destructors already queued there.  */
10587   if (!gimple_seq_empty_p (stmts))
10589       gimple_seq_add_seq (&stmts, *dlist);
10590       *dlist = stmts;
10592   /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
10593   vinit = fd->loop.n1;
10594   if (cond_code == EQ_EXPR
10595       && tree_fits_shwi_p (fd->loop.n2)
10596       && ! integer_zerop (fd->loop.n2))
10597     vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10598   else
10599     vinit = unshare_expr (vinit);
10601   /* Initialize the iterator variable, so that threads that don't execute
10602      any iterations don't execute the lastprivate clauses by accident.  */
10603   gimplify_assign (fd->loop.v, vinit, body_p);
10607 /* OpenACC privatization.
10609 Or, in other words, *sharing* at the respective OpenACC level of
10610 parallelism.
10612 From a correctness perspective, a non-addressable variable can't be accessed
10613 outside the current thread, so it can go in a (faster than shared memory)
10614 register -- though that register may need to be broadcast in some
10615 circumstances. A variable can only meaningfully be "shared" across workers
10616 or vector lanes if its address is taken, e.g. by a call to an atomic
10617 builtin.
10619 From an optimisation perspective, the answer might be fuzzier: maybe
10620 sometimes, using shared memory directly would be faster than
10621 broadcasting. */
/* Emit, with dump flags L_DUMP_FLAGS at location LOC, the common prefix of
   an OpenACC privatization diagnostic for DECL: "variable <D> in <clause>
   clause " when C is non-NULL, else "variable <D> declared in block ".
   Callers append the verdict text.  */
10623 static void
10624 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10625 				       const location_t loc, const tree c,
10626 				       const tree decl)
10628   const dump_user_location_t d_u_loc
10629     = dump_user_location_t::from_location_t (loc);
10630   /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10631 #if __GNUC__ >= 10
10632 # pragma GCC diagnostic push
10633 # pragma GCC diagnostic ignored "-Wformat"
10634 #endif
10635   dump_printf_loc (l_dump_flags, d_u_loc,
10636 		   "variable %<%T%> ", decl);
10637 #if __GNUC__ >= 10
10638 # pragma GCC diagnostic pop
10639 #endif
10640   if (c)
10641     dump_printf (l_dump_flags,
10642 		 "in %qs clause ",
10643 		 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10644   else
10645     dump_printf (l_dump_flags,
10646 		 "declared in block ");
/* Return true iff DECL is a candidate for adjusting its OpenACC
   privatization level.  C is the 'private' clause that named it, or
   NULL_TREE when DECL instead comes from a block's declaration chain;
   LOC is used for diagnostics.  Every rejection (and acceptance) is
   reported through the -fopt-info machinery when dumping is enabled.  */
10650 static bool
10651 oacc_privatization_candidate_p (const location_t loc, const tree c,
10652 				const tree decl)
10653   dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10655   /* There is some differentiation depending on block vs. clause.  */
10656   bool block = !c;
10658   bool res = true;
/* Reject anything that is not a VAR_DECL.  */
10660   if (res && !VAR_P (decl))
10662       /* A PARM_DECL (appearing in a 'private' clause) is expected to have been
10663 	 privatized into a new VAR_DECL.  */
10664       gcc_checking_assert (TREE_CODE (decl) != PARM_DECL);
10666       res = false;
10668       if (dump_enabled_p ())
10670 	  oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10671 	  dump_printf (l_dump_flags,
10672 		       "potentially has improper OpenACC privatization level: %qs\n",
10673 		       get_tree_code_name (TREE_CODE (decl)));
/* Block-scope candidates only: static storage is shared, not per-thread.  */
10677   if (res && block && TREE_STATIC (decl))
10679       res = false;
10681       if (dump_enabled_p ())
10683 	  oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10684 	  dump_printf (l_dump_flags,
10685 		       "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10686 		       "static");
/* Block-scope candidates only: external declarations are not ours to
   privatize.  */
10690   if (res && block && DECL_EXTERNAL (decl))
10692       res = false;
10694       if (dump_enabled_p ())
10696 	  oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10697 	  dump_printf (l_dump_flags,
10698 		       "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10699 		       "external");
/* Non-addressable variables cannot be observed across workers/lanes, so
   there is nothing to adjust (see the file comment above).  */
10703   if (res && !TREE_ADDRESSABLE (decl))
10705       res = false;
10707       if (dump_enabled_p ())
10709 	  oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10710 	  dump_printf (l_dump_flags,
10711 		       "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10712 		       "not addressable");
10716   /* If an artificial variable has been added to a bind, e.g.
10717      a compiler-generated temporary structure used by the Fortran front-end, do
10718      not consider it as a privatization candidate.  Note that variables on
10719      the stack are private per-thread by default: making them "gang-private"
10720      for OpenACC actually means to share a single instance of a variable
10721      amongst all workers and threads spawned within each gang.
10722      At present, no compiler-generated artificial variables require such
10723      sharing semantics, so this is safe.  */
10725   if (res && block && DECL_ARTIFICIAL (decl))
10727       res = false;
10729       if (dump_enabled_p ())
10731 	  oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10732 	  dump_printf (l_dump_flags,
10733 		       "isn%'t candidate for adjusting OpenACC privatization "
10734 		       "level: %s\n", "artificial");
10738   if (res)
10740       if (dump_enabled_p ())
10742 	  oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10743 	  dump_printf (l_dump_flags,
10744 		       "is candidate for adjusting OpenACC privatization level\n");
/* With -fdump-...-details, additionally print the full decl.  */
10748   if (dump_file && (dump_flags & TDF_DETAILS))
10750       print_generic_decl (dump_file, decl, dump_flags);
10751       fprintf (dump_file, "\n");
10754   return res;
10757 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10758 CTX. */
10760 static void
10761 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10763 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10764 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10766 tree decl = OMP_CLAUSE_DECL (c);
10768 tree new_decl = lookup_decl (decl, ctx);
10770 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c,
10771 new_decl))
10772 continue;
10774 gcc_checking_assert
10775 (!ctx->oacc_privatization_candidates.contains (new_decl));
10776 ctx->oacc_privatization_candidates.safe_push (new_decl);
10780 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10781 CTX. */
10783 static void
10784 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10786 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10788 tree new_decl = lookup_decl (decl, ctx);
10789 gcc_checking_assert (new_decl == decl);
10791 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL,
10792 new_decl))
10793 continue;
10795 gcc_checking_assert
10796 (!ctx->oacc_privatization_candidates.contains (new_decl));
10797 ctx->oacc_privatization_candidates.safe_push (new_decl);
10801 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10803 static tree
10804 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10805 struct walk_stmt_info *wi)
10807 gimple *stmt = gsi_stmt (*gsi_p);
10809 *handled_ops_p = true;
10810 switch (gimple_code (stmt))
10812 WALK_SUBSTMTS;
10814 case GIMPLE_OMP_FOR:
10815 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10816 && gimple_omp_for_combined_into_p (stmt))
10817 *handled_ops_p = false;
10818 break;
10820 case GIMPLE_OMP_SCAN:
10821 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10822 return integer_zero_node;
10823 default:
10824 break;
10826 return NULL;
10829 /* Helper function for lower_omp_for, add transformations for a worksharing
10830 loop with scan directives inside of it.
10831 For worksharing loop not combined with simd, transform:
10832 #pragma omp for reduction(inscan,+:r) private(i)
10833 for (i = 0; i < n; i = i + 1)
10836 update (r);
10838 #pragma omp scan inclusive(r)
10840 use (r);
10844 into two worksharing loops + code to merge results:
10846 num_threads = omp_get_num_threads ();
10847 thread_num = omp_get_thread_num ();
10848 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10849 <D.2099>:
10850 var2 = r;
10851 goto <D.2101>;
10852 <D.2100>:
10853 // For UDRs this is UDR init, or if ctors are needed, copy from
10854 // var3 that has been constructed to contain the neutral element.
10855 var2 = 0;
10856 <D.2101>:
10857 ivar = 0;
10858 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10859 // a shared array with num_threads elements and rprivb to a local array
10860 // number of elements equal to the number of (contiguous) iterations the
10861 // current thread will perform. controlb and controlp variables are
10862 // temporaries to handle deallocation of rprivb at the end of second
10863 // GOMP_FOR.
10864 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10865 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10866 for (i = 0; i < n; i = i + 1)
10869 // For UDRs this is UDR init or copy from var3.
10870 r = 0;
10871 // This is the input phase from user code.
10872 update (r);
10875 // For UDRs this is UDR merge.
10876 var2 = var2 + r;
10877 // Rather than handing it over to the user, save to local thread's
10878 // array.
10879 rprivb[ivar] = var2;
10880 // For exclusive scan, the above two statements are swapped.
10881 ivar = ivar + 1;
10884 // And remember the final value from this thread's into the shared
10885 // rpriva array.
10886 rpriva[(sizetype) thread_num] = var2;
10887 // If more than one thread, compute using Work-Efficient prefix sum
10888 // the inclusive parallel scan of the rpriva array.
10889 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10890 <D.2102>:
10891 GOMP_barrier ();
10892 down = 0;
10893 k = 1;
10894 num_threadsu = (unsigned int) num_threads;
10895 thread_numup1 = (unsigned int) thread_num + 1;
10896 <D.2108>:
10897 twok = k << 1;
10898 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10899 <D.2110>:
10900 down = 4294967295;
10901 k = k >> 1;
10902 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10903 <D.2112>:
10904 k = k >> 1;
10905 <D.2111>:
10906 twok = k << 1;
10907 cplx = .MUL_OVERFLOW (thread_numup1, twok);
10908 mul = REALPART_EXPR <cplx>;
10909 ovf = IMAGPART_EXPR <cplx>;
10910 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10911 <D.2116>:
10912 andv = k & down;
10913 andvm1 = andv + 4294967295;
10914 l = mul + andvm1;
10915 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10916 <D.2120>:
10917 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10918 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10919 rpriva[l] = rpriva[l - k] + rpriva[l];
10920 <D.2117>:
10921 if (down == 0) goto <D.2121>; else goto <D.2122>;
10922 <D.2121>:
10923 k = k << 1;
10924 goto <D.2123>;
10925 <D.2122>:
10926 k = k >> 1;
10927 <D.2123>:
10928 GOMP_barrier ();
10929 if (k != 0) goto <D.2108>; else goto <D.2103>;
10930 <D.2103>:
10931 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10932 <D.2124>:
10933 // For UDRs this is UDR init or copy from var3.
10934 var2 = 0;
10935 goto <D.2126>;
10936 <D.2125>:
10937 var2 = rpriva[thread_num - 1];
10938 <D.2126>:
10939 ivar = 0;
10940 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10941 reduction(inscan,+:r) private(i)
10942 for (i = 0; i < n; i = i + 1)
10945 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10946 r = var2 + rprivb[ivar];
10949 // This is the scan phase from user code.
10950 use (r);
10951 // Plus a bump of the iterator.
10952 ivar = ivar + 1;
10954 } */
10956 static void
10957 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10958 struct omp_for_data *fd, omp_context *ctx)
10960 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10961 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10963 gimple_seq body = gimple_omp_body (stmt);
10964 gimple_stmt_iterator input1_gsi = gsi_none ();
10965 struct walk_stmt_info wi;
10966 memset (&wi, 0, sizeof (wi));
10967 wi.val_only = true;
10968 wi.info = (void *) &input1_gsi;
10969 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10970 gcc_assert (!gsi_end_p (input1_gsi));
10972 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10973 gimple_stmt_iterator gsi = input1_gsi;
10974 gsi_next (&gsi);
10975 gimple_stmt_iterator scan1_gsi = gsi;
10976 gimple *scan_stmt1 = gsi_stmt (gsi);
10977 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10979 gimple_seq input_body = gimple_omp_body (input_stmt1);
10980 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10981 gimple_omp_set_body (input_stmt1, NULL);
10982 gimple_omp_set_body (scan_stmt1, NULL);
10983 gimple_omp_set_body (stmt, NULL);
10985 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10986 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10987 gimple_omp_set_body (stmt, body);
10988 gimple_omp_set_body (input_stmt1, input_body);
10990 gimple_stmt_iterator input2_gsi = gsi_none ();
10991 memset (&wi, 0, sizeof (wi));
10992 wi.val_only = true;
10993 wi.info = (void *) &input2_gsi;
10994 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10995 gcc_assert (!gsi_end_p (input2_gsi));
10997 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10998 gsi = input2_gsi;
10999 gsi_next (&gsi);
11000 gimple_stmt_iterator scan2_gsi = gsi;
11001 gimple *scan_stmt2 = gsi_stmt (gsi);
11002 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
11003 gimple_omp_set_body (scan_stmt2, scan_body);
11005 gimple_stmt_iterator input3_gsi = gsi_none ();
11006 gimple_stmt_iterator scan3_gsi = gsi_none ();
11007 gimple_stmt_iterator input4_gsi = gsi_none ();
11008 gimple_stmt_iterator scan4_gsi = gsi_none ();
11009 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
11010 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
11011 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
11012 if (is_for_simd)
11014 memset (&wi, 0, sizeof (wi));
11015 wi.val_only = true;
11016 wi.info = (void *) &input3_gsi;
11017 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
11018 gcc_assert (!gsi_end_p (input3_gsi));
11020 input_stmt3 = gsi_stmt (input3_gsi);
11021 gsi = input3_gsi;
11022 gsi_next (&gsi);
11023 scan3_gsi = gsi;
11024 scan_stmt3 = gsi_stmt (gsi);
11025 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
11027 memset (&wi, 0, sizeof (wi));
11028 wi.val_only = true;
11029 wi.info = (void *) &input4_gsi;
11030 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
11031 gcc_assert (!gsi_end_p (input4_gsi));
11033 input_stmt4 = gsi_stmt (input4_gsi);
11034 gsi = input4_gsi;
11035 gsi_next (&gsi);
11036 scan4_gsi = gsi;
11037 scan_stmt4 = gsi_stmt (gsi);
11038 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
11040 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
11041 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
11044 tree num_threads = create_tmp_var (integer_type_node);
11045 tree thread_num = create_tmp_var (integer_type_node);
11046 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
11047 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
11048 gimple *g = gimple_build_call (nthreads_decl, 0);
11049 gimple_call_set_lhs (g, num_threads);
11050 gimple_seq_add_stmt (body_p, g);
11051 g = gimple_build_call (threadnum_decl, 0);
11052 gimple_call_set_lhs (g, thread_num);
11053 gimple_seq_add_stmt (body_p, g);
11055 tree ivar = create_tmp_var (sizetype);
11056 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
11057 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
11058 tree k = create_tmp_var (unsigned_type_node);
11059 tree l = create_tmp_var (unsigned_type_node);
11061 gimple_seq clist = NULL, mdlist = NULL;
11062 gimple_seq thr01_list = NULL, thrn1_list = NULL;
11063 gimple_seq thr02_list = NULL, thrn2_list = NULL;
11064 gimple_seq scan1_list = NULL, input2_list = NULL;
11065 gimple_seq last_list = NULL, reduc_list = NULL;
11066 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11067 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11068 && OMP_CLAUSE_REDUCTION_INSCAN (c))
11070 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11071 tree var = OMP_CLAUSE_DECL (c);
11072 tree new_var = lookup_decl (var, ctx);
11073 tree var3 = NULL_TREE;
11074 tree new_vard = new_var;
11075 if (omp_privatize_by_reference (var))
11076 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
11077 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11079 var3 = maybe_lookup_decl (new_vard, ctx);
11080 if (var3 == new_vard)
11081 var3 = NULL_TREE;
11084 tree ptype = build_pointer_type (TREE_TYPE (new_var));
11085 tree rpriva = create_tmp_var (ptype);
11086 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
11087 OMP_CLAUSE_DECL (nc) = rpriva;
11088 *cp1 = nc;
11089 cp1 = &OMP_CLAUSE_CHAIN (nc);
11091 tree rprivb = create_tmp_var (ptype);
11092 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
11093 OMP_CLAUSE_DECL (nc) = rprivb;
11094 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
11095 *cp1 = nc;
11096 cp1 = &OMP_CLAUSE_CHAIN (nc);
11098 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
11099 if (new_vard != new_var)
11100 TREE_ADDRESSABLE (var2) = 1;
11101 gimple_add_tmp_var (var2);
11103 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
11104 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11105 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11106 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11107 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
11109 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
11110 thread_num, integer_minus_one_node);
11111 x = fold_convert_loc (clause_loc, sizetype, x);
11112 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11113 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11114 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11115 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
11117 x = fold_convert_loc (clause_loc, sizetype, l);
11118 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11119 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11120 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11121 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
11123 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
11124 x = fold_convert_loc (clause_loc, sizetype, x);
11125 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11126 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11127 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11128 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
11130 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
11131 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11132 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
11133 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
11135 tree var4 = is_for_simd ? new_var : var2;
11136 tree var5 = NULL_TREE, var6 = NULL_TREE;
11137 if (is_for_simd)
11139 var5 = lookup_decl (var, input_simd_ctx);
11140 var6 = lookup_decl (var, scan_simd_ctx);
11141 if (new_vard != new_var)
11143 var5 = build_simple_mem_ref_loc (clause_loc, var5);
11144 var6 = build_simple_mem_ref_loc (clause_loc, var6);
11147 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11149 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
11150 tree val = var2;
11152 x = lang_hooks.decls.omp_clause_default_ctor
11153 (c, var2, build_outer_var_ref (var, ctx));
11154 if (x)
11155 gimplify_and_add (x, &clist);
11157 x = build_outer_var_ref (var, ctx);
11158 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
11160 gimplify_and_add (x, &thr01_list);
11162 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
11163 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
11164 if (var3)
11166 x = unshare_expr (var4);
11167 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11168 gimplify_and_add (x, &thrn1_list);
11169 x = unshare_expr (var4);
11170 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11171 gimplify_and_add (x, &thr02_list);
11173 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
11175 /* Otherwise, assign to it the identity element. */
11176 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11177 tseq = copy_gimple_seq_and_replace_locals (tseq);
11178 if (!is_for_simd)
11180 if (new_vard != new_var)
11181 val = build_fold_addr_expr_loc (clause_loc, val);
11182 SET_DECL_VALUE_EXPR (new_vard, val);
11183 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11185 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
11186 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11187 lower_omp (&tseq, ctx);
11188 gimple_seq_add_seq (&thrn1_list, tseq);
11189 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11190 lower_omp (&tseq, ctx);
11191 gimple_seq_add_seq (&thr02_list, tseq);
11192 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11193 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11194 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11195 if (y)
11196 SET_DECL_VALUE_EXPR (new_vard, y);
11197 else
11199 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11200 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11204 x = unshare_expr (var4);
11205 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
11206 gimplify_and_add (x, &thrn2_list);
11208 if (is_for_simd)
11210 x = unshare_expr (rprivb_ref);
11211 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
11212 gimplify_and_add (x, &scan1_list);
11214 else
11216 if (ctx->scan_exclusive)
11218 x = unshare_expr (rprivb_ref);
11219 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11220 gimplify_and_add (x, &scan1_list);
11223 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11224 tseq = copy_gimple_seq_and_replace_locals (tseq);
11225 SET_DECL_VALUE_EXPR (placeholder, var2);
11226 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11227 lower_omp (&tseq, ctx);
11228 gimple_seq_add_seq (&scan1_list, tseq);
11230 if (ctx->scan_inclusive)
11232 x = unshare_expr (rprivb_ref);
11233 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11234 gimplify_and_add (x, &scan1_list);
11238 x = unshare_expr (rpriva_ref);
11239 x = lang_hooks.decls.omp_clause_assign_op (c, x,
11240 unshare_expr (var4));
11241 gimplify_and_add (x, &mdlist);
11243 x = unshare_expr (is_for_simd ? var6 : new_var);
11244 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
11245 gimplify_and_add (x, &input2_list);
11247 val = rprivb_ref;
11248 if (new_vard != new_var)
11249 val = build_fold_addr_expr_loc (clause_loc, val);
11251 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11252 tseq = copy_gimple_seq_and_replace_locals (tseq);
11253 SET_DECL_VALUE_EXPR (new_vard, val);
11254 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11255 if (is_for_simd)
11257 SET_DECL_VALUE_EXPR (placeholder, var6);
11258 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11260 else
11261 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11262 lower_omp (&tseq, ctx);
11263 if (y)
11264 SET_DECL_VALUE_EXPR (new_vard, y);
11265 else
11267 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11268 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11270 if (!is_for_simd)
11272 SET_DECL_VALUE_EXPR (placeholder, new_var);
11273 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11274 lower_omp (&tseq, ctx);
11276 gimple_seq_add_seq (&input2_list, tseq);
11278 x = build_outer_var_ref (var, ctx);
11279 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
11280 gimplify_and_add (x, &last_list);
11282 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
11283 gimplify_and_add (x, &reduc_list);
11284 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11285 tseq = copy_gimple_seq_and_replace_locals (tseq);
11286 val = rprival_ref;
11287 if (new_vard != new_var)
11288 val = build_fold_addr_expr_loc (clause_loc, val);
11289 SET_DECL_VALUE_EXPR (new_vard, val);
11290 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11291 SET_DECL_VALUE_EXPR (placeholder, var2);
11292 lower_omp (&tseq, ctx);
11293 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11294 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11295 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11296 if (y)
11297 SET_DECL_VALUE_EXPR (new_vard, y);
11298 else
11300 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11301 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11303 gimple_seq_add_seq (&reduc_list, tseq);
11304 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
11305 gimplify_and_add (x, &reduc_list);
11307 x = lang_hooks.decls.omp_clause_dtor (c, var2);
11308 if (x)
11309 gimplify_and_add (x, dlist);
11311 else
11313 x = build_outer_var_ref (var, ctx);
11314 gimplify_assign (unshare_expr (var4), x, &thr01_list);
11316 x = omp_reduction_init (c, TREE_TYPE (new_var));
11317 gimplify_assign (unshare_expr (var4), unshare_expr (x),
11318 &thrn1_list);
11319 gimplify_assign (unshare_expr (var4), x, &thr02_list);
11321 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
11323 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
11324 if (code == MINUS_EXPR)
11325 code = PLUS_EXPR;
11327 if (is_for_simd)
11328 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
11329 else
11331 if (ctx->scan_exclusive)
11332 gimplify_assign (unshare_expr (rprivb_ref), var2,
11333 &scan1_list);
11334 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
11335 gimplify_assign (var2, x, &scan1_list);
11336 if (ctx->scan_inclusive)
11337 gimplify_assign (unshare_expr (rprivb_ref), var2,
11338 &scan1_list);
11341 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
11342 &mdlist);
11344 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
11345 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
11347 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
11348 &last_list);
11350 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
11351 unshare_expr (rprival_ref));
11352 gimplify_assign (rprival_ref, x, &reduc_list);
11356 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11357 gimple_seq_add_stmt (&scan1_list, g);
11358 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11359 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11360 ? scan_stmt4 : scan_stmt2), g);
11362 tree controlb = create_tmp_var (boolean_type_node);
11363 tree controlp = create_tmp_var (ptr_type_node);
11364 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11365 OMP_CLAUSE_DECL (nc) = controlb;
11366 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11367 *cp1 = nc;
11368 cp1 = &OMP_CLAUSE_CHAIN (nc);
11369 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11370 OMP_CLAUSE_DECL (nc) = controlp;
11371 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11372 *cp1 = nc;
11373 cp1 = &OMP_CLAUSE_CHAIN (nc);
11374 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11375 OMP_CLAUSE_DECL (nc) = controlb;
11376 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11377 *cp2 = nc;
11378 cp2 = &OMP_CLAUSE_CHAIN (nc);
11379 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11380 OMP_CLAUSE_DECL (nc) = controlp;
11381 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11382 *cp2 = nc;
11383 cp2 = &OMP_CLAUSE_CHAIN (nc);
11385 *cp1 = gimple_omp_for_clauses (stmt);
11386 gimple_omp_for_set_clauses (stmt, new_clauses1);
11387 *cp2 = gimple_omp_for_clauses (new_stmt);
11388 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
11390 if (is_for_simd)
11392 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
11393 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
11395 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
11396 GSI_SAME_STMT);
11397 gsi_remove (&input3_gsi, true);
11398 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11399 GSI_SAME_STMT);
11400 gsi_remove (&scan3_gsi, true);
11401 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11402 GSI_SAME_STMT);
11403 gsi_remove (&input4_gsi, true);
11404 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11405 GSI_SAME_STMT);
11406 gsi_remove (&scan4_gsi, true);
11408 else
11410 gimple_omp_set_body (scan_stmt1, scan1_list);
11411 gimple_omp_set_body (input_stmt2, input2_list);
11414 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11415 GSI_SAME_STMT);
11416 gsi_remove (&input1_gsi, true);
11417 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11418 GSI_SAME_STMT);
11419 gsi_remove (&scan1_gsi, true);
11420 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11421 GSI_SAME_STMT);
11422 gsi_remove (&input2_gsi, true);
11423 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11424 GSI_SAME_STMT);
11425 gsi_remove (&scan2_gsi, true);
11427 gimple_seq_add_seq (body_p, clist);
11429 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11430 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11431 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11432 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11433 gimple_seq_add_stmt (body_p, g);
11434 g = gimple_build_label (lab1);
11435 gimple_seq_add_stmt (body_p, g);
11436 gimple_seq_add_seq (body_p, thr01_list);
11437 g = gimple_build_goto (lab3);
11438 gimple_seq_add_stmt (body_p, g);
11439 g = gimple_build_label (lab2);
11440 gimple_seq_add_stmt (body_p, g);
11441 gimple_seq_add_seq (body_p, thrn1_list);
11442 g = gimple_build_label (lab3);
11443 gimple_seq_add_stmt (body_p, g);
11445 g = gimple_build_assign (ivar, size_zero_node);
11446 gimple_seq_add_stmt (body_p, g);
11448 gimple_seq_add_stmt (body_p, stmt);
11449 gimple_seq_add_seq (body_p, body);
11450 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11451 fd->loop.v));
11453 g = gimple_build_omp_return (true);
11454 gimple_seq_add_stmt (body_p, g);
11455 gimple_seq_add_seq (body_p, mdlist);
11457 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11458 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11459 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11460 gimple_seq_add_stmt (body_p, g);
11461 g = gimple_build_label (lab1);
11462 gimple_seq_add_stmt (body_p, g);
11464 g = omp_build_barrier (NULL);
11465 gimple_seq_add_stmt (body_p, g);
11467 tree down = create_tmp_var (unsigned_type_node);
11468 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11469 gimple_seq_add_stmt (body_p, g);
11471 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11472 gimple_seq_add_stmt (body_p, g);
11474 tree num_threadsu = create_tmp_var (unsigned_type_node);
11475 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11476 gimple_seq_add_stmt (body_p, g);
11478 tree thread_numu = create_tmp_var (unsigned_type_node);
11479 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11480 gimple_seq_add_stmt (body_p, g);
11482 tree thread_nump1 = create_tmp_var (unsigned_type_node);
11483 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11484 build_int_cst (unsigned_type_node, 1));
11485 gimple_seq_add_stmt (body_p, g);
11487 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11488 g = gimple_build_label (lab3);
11489 gimple_seq_add_stmt (body_p, g);
11491 tree twok = create_tmp_var (unsigned_type_node);
11492 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11493 gimple_seq_add_stmt (body_p, g);
11495 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11496 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11497 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11498 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11499 gimple_seq_add_stmt (body_p, g);
11500 g = gimple_build_label (lab4);
11501 gimple_seq_add_stmt (body_p, g);
11502 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11503 gimple_seq_add_stmt (body_p, g);
11504 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11505 gimple_seq_add_stmt (body_p, g);
11507 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11508 gimple_seq_add_stmt (body_p, g);
11509 g = gimple_build_label (lab6);
11510 gimple_seq_add_stmt (body_p, g);
11512 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11513 gimple_seq_add_stmt (body_p, g);
11515 g = gimple_build_label (lab5);
11516 gimple_seq_add_stmt (body_p, g);
11518 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11519 gimple_seq_add_stmt (body_p, g);
11521 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11522 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11523 gimple_call_set_lhs (g, cplx);
11524 gimple_seq_add_stmt (body_p, g);
11525 tree mul = create_tmp_var (unsigned_type_node);
11526 g = gimple_build_assign (mul, REALPART_EXPR,
11527 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11528 gimple_seq_add_stmt (body_p, g);
11529 tree ovf = create_tmp_var (unsigned_type_node);
11530 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11531 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11532 gimple_seq_add_stmt (body_p, g);
11534 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11535 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11536 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11537 lab7, lab8);
11538 gimple_seq_add_stmt (body_p, g);
11539 g = gimple_build_label (lab7);
11540 gimple_seq_add_stmt (body_p, g);
11542 tree andv = create_tmp_var (unsigned_type_node);
11543 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11544 gimple_seq_add_stmt (body_p, g);
11545 tree andvm1 = create_tmp_var (unsigned_type_node);
11546 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11547 build_minus_one_cst (unsigned_type_node));
11548 gimple_seq_add_stmt (body_p, g);
11550 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11551 gimple_seq_add_stmt (body_p, g);
11553 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11554 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11555 gimple_seq_add_stmt (body_p, g);
11556 g = gimple_build_label (lab9);
11557 gimple_seq_add_stmt (body_p, g);
11558 gimple_seq_add_seq (body_p, reduc_list);
11559 g = gimple_build_label (lab8);
11560 gimple_seq_add_stmt (body_p, g);
11562 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11563 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11564 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11565 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11566 lab10, lab11);
11567 gimple_seq_add_stmt (body_p, g);
11568 g = gimple_build_label (lab10);
11569 gimple_seq_add_stmt (body_p, g);
11570 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11571 gimple_seq_add_stmt (body_p, g);
11572 g = gimple_build_goto (lab12);
11573 gimple_seq_add_stmt (body_p, g);
11574 g = gimple_build_label (lab11);
11575 gimple_seq_add_stmt (body_p, g);
11576 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11577 gimple_seq_add_stmt (body_p, g);
11578 g = gimple_build_label (lab12);
11579 gimple_seq_add_stmt (body_p, g);
11581 g = omp_build_barrier (NULL);
11582 gimple_seq_add_stmt (body_p, g);
11584 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11585 lab3, lab2);
11586 gimple_seq_add_stmt (body_p, g);
11588 g = gimple_build_label (lab2);
11589 gimple_seq_add_stmt (body_p, g);
11591 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11592 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11593 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11594 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11595 gimple_seq_add_stmt (body_p, g);
11596 g = gimple_build_label (lab1);
11597 gimple_seq_add_stmt (body_p, g);
11598 gimple_seq_add_seq (body_p, thr02_list);
11599 g = gimple_build_goto (lab3);
11600 gimple_seq_add_stmt (body_p, g);
11601 g = gimple_build_label (lab2);
11602 gimple_seq_add_stmt (body_p, g);
11603 gimple_seq_add_seq (body_p, thrn2_list);
11604 g = gimple_build_label (lab3);
11605 gimple_seq_add_stmt (body_p, g);
11607 g = gimple_build_assign (ivar, size_zero_node);
11608 gimple_seq_add_stmt (body_p, g);
11609 gimple_seq_add_stmt (body_p, new_stmt);
11610 gimple_seq_add_seq (body_p, new_body);
11612 gimple_seq new_dlist = NULL;
11613 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11614 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11615 tree num_threadsm1 = create_tmp_var (integer_type_node);
11616 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11617 integer_minus_one_node);
11618 gimple_seq_add_stmt (&new_dlist, g);
11619 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11620 gimple_seq_add_stmt (&new_dlist, g);
11621 g = gimple_build_label (lab1);
11622 gimple_seq_add_stmt (&new_dlist, g);
11623 gimple_seq_add_seq (&new_dlist, last_list);
11624 g = gimple_build_label (lab2);
11625 gimple_seq_add_stmt (&new_dlist, g);
11626 gimple_seq_add_seq (&new_dlist, *dlist);
11627 *dlist = new_dlist;
11630 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11631 the addresses of variables to be made private at the surrounding
11632 parallelism level. Such functions appear in the gimple code stream in two
11633 forms, e.g. for a partitioned loop:
11635 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11636 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11637 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11638 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11640 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11641 not as part of a HEAD_MARK sequence:
11643 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11645 For such stand-alone appearances, the 3rd argument is always 0, denoting
11646 gang partitioning. */
11648 static gcall *
11649 lower_oacc_private_marker (omp_context *ctx)
11651 if (ctx->oacc_privatization_candidates.length () == 0)
11652 return NULL;
11654 auto_vec<tree, 5> args;
11656 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11657 args.quick_push (integer_zero_node);
11658 args.quick_push (integer_minus_one_node);
11660 int i;
11661 tree decl;
11662 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11664 gcc_checking_assert (TREE_ADDRESSABLE (decl));
11665 tree addr = build_fold_addr_expr (decl);
11666 args.safe_push (addr);
11669 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
/* Lower code for an OMP loop directive.  Replaces the GIMPLE_OMP_FOR at
   *GSI_P with a GIMPLE_BIND containing the lowered construct: input
   clauses, lowered header expressions, the loop itself (or its scan
   expansion), continue/return markers and exit clauses.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
  gimple_seq cnt_list = NULL, clist = NULL;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  if (is_gimple_omp_oacc (ctx->stmt))
    oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));

  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      if (is_gimple_omp_oacc (ctx->stmt))
	oacc_privatization_scan_decl_chain (ctx, vars);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and it's block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      size_t count2 = 0;
      tree type2 = NULL_TREE;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      /* For a non-rectangular doubly-nested loop with a signed index,
	 extra temporaries are needed (count2 of them, with type2).  */
      if (fd.collapse > 1
	  && fd.non_rect
	  && fd.last_nonrect == fd.first_nonrect + 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
	  if (!TYPE_UNSIGNED (TREE_TYPE (v)))
	    {
	      v = gimple_omp_for_index (stmt, fd.first_nonrect);
	      type2 = TREE_TYPE (v);
	      count++;
	      count2 = 3;
	    }
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      /* Create one _looptemp_ clause per required temporary, reusing the
	 outer taskreg's or the sibling SIMT stmt's decls when present.  */
      for (i = 0; i < count + count2; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (i >= count ? type2 : type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
      /* Reattach the original clauses after the new _looptemp_ ones.  */
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
				      OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      /* Task reductions: prepend a _reductemp_ clause and lower the
	 reductions into separate init (tred_ilist) and done (tred_dlist)
	 sequences.  */
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
      gimple_omp_for_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_FOR,
				 gimple_omp_for_clauses (stmt),
				 &tred_ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
    }

  lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
					 ctx);

  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
		      gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);

  gcall *private_marker = NULL;
  if (is_gimple_omp_oacc (ctx->stmt)
      && !gimple_seq_empty_p (omp_for_body))
    private_marker = lower_oacc_private_marker (ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  /* Non-rectangular bound: force elements 1 and 2 (outer-var
	     factor and offset) into temporaries when not invariant.  */
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
    }
  if (rclauses)
    gimple_seq_add_seq (&tred_ilist, cnt_list);
  else
    gimple_seq_add_seq (&body, cnt_list);

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt), private_marker,
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);

  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  /* Remap linear clause decls/steps into this context.  */
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  if ((ctx->scan_inclusive || ctx->scan_exclusive)
      && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
  else
    {
      gimple_seq_add_stmt (&body, stmt);
      gimple_seq_add_seq (&body, gimple_omp_body (stmt));
    }

  gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							 fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);

  if (clist)
    {
      /* Reductions that must be merged atomically are wrapped in a
	 GOMP_atomic_start/GOMP_atomic_end pair.  */
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
      gimple_seq_add_seq (&body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
    }

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  if (rclauses)
    {
      /* With task reductions, the whole body runs inside the reduction
	 init sequence.  */
      gimple_seq_add_seq (&tred_ilist, body);
      body = tred_ilist;
    }

  body = maybe_catch_exception (body);

  /* Region exit marker goes at the end of the loop body.  */
  gimple *g = gimple_build_omp_return (fd.have_nowait);
  gimple_seq_add_stmt (&body, g);

  gimple_seq_add_seq (&body, tred_dlist);

  maybe_add_implicit_barrier_cancel (ctx, g, &body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  maybe_remove_omp_member_access_dummy_vars (new_stmt);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  gimple_bind_set_body (new_stmt, body);
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}
11971 /* Callback for walk_stmts. Check if the current statement only contains
11972 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11974 static tree
11975 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11976 bool *handled_ops_p,
11977 struct walk_stmt_info *wi)
11979 int *info = (int *) wi->info;
11980 gimple *stmt = gsi_stmt (*gsi_p);
11982 *handled_ops_p = true;
11983 switch (gimple_code (stmt))
11985 WALK_SUBSTMTS;
11987 case GIMPLE_DEBUG:
11988 break;
11989 case GIMPLE_OMP_FOR:
11990 case GIMPLE_OMP_SECTIONS:
11991 *info = *info == 0 ? 1 : -1;
11992 break;
11993 default:
11994 *info = -1;
11995 break;
11997 return NULL;
/* State threaded through the tree-inline callbacks while building a
   task copy function (see task_copyfn_copy_decl and
   task_copyfn_remap_type below).  */

struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.cc (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  /* The OMP context whose sfield_map drives the remapping.  */
  omp_context *ctx;
};
12010 static tree
12011 task_copyfn_copy_decl (tree var, copy_body_data *cb)
12013 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
12015 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
12016 return create_tmp_var (TREE_TYPE (var));
12018 return var;
/* Build a remapped copy of RECORD_TYPE ORIG_TYPE for a task copyfn:
   each field is copied with its type remapped through TCCTX->cb, and
   trees inside the field's size/offset expressions are rewritten with
   copy_tree_body_r.  Each old field is mapped to its copy in
   TCCTX->cb.decl_map.  Returns the freshly laid out type.  */

static tree
task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
{
  tree name, new_fields = NULL, type, f;

  type = lang_hooks.types.make_type (RECORD_TYPE);
  /* Give the copy the same name as the original, with a TYPE_DECL
     located at the task statement.  */
  name = DECL_NAME (TYPE_NAME (orig_type));
  name = build_decl (gimple_location (tcctx->ctx->stmt),
		     TYPE_DECL, name, type);
  TYPE_NAME (type) = name;

  for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
    {
      tree new_f = copy_node (f);
      DECL_CONTEXT (new_f) = type;
      TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
      /* Fields are collected in reverse; nreverse below restores the
	 original order.  */
      TREE_CHAIN (new_f) = new_fields;
      /* Remap any temporaries referenced by variably sized fields.  */
      walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		 &tcctx->cb, NULL);
      new_fields = new_f;
      tcctx->cb.decl_map->put (f, new_f);
    }
  TYPE_FIELDS (type) = nreverse (new_fields);
  layout_type (type);
  return type;
}
12050 /* Create task copyfn. */
12052 static void
12053 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
12055 struct function *child_cfun;
12056 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
12057 tree record_type, srecord_type, bind, list;
12058 bool record_needs_remap = false, srecord_needs_remap = false;
12059 splay_tree_node n;
12060 struct omp_taskcopy_context tcctx;
12061 location_t loc = gimple_location (task_stmt);
12062 size_t looptempno = 0;
12064 child_fn = gimple_omp_task_copy_fn (task_stmt);
12065 task_cpyfns.safe_push (task_stmt);
12066 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
12067 gcc_assert (child_cfun->cfg == NULL);
12068 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
12070 /* Reset DECL_CONTEXT on function arguments. */
12071 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
12072 DECL_CONTEXT (t) = child_fn;
12074 /* Populate the function. */
12075 push_gimplify_context ();
12076 push_cfun (child_cfun);
12078 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
12079 TREE_SIDE_EFFECTS (bind) = 1;
12080 list = NULL;
12081 DECL_SAVED_TREE (child_fn) = bind;
12082 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
12084 /* Remap src and dst argument types if needed. */
12085 record_type = ctx->record_type;
12086 srecord_type = ctx->srecord_type;
12087 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
12088 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
12090 record_needs_remap = true;
12091 break;
12093 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
12094 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
12096 srecord_needs_remap = true;
12097 break;
12100 if (record_needs_remap || srecord_needs_remap)
12102 memset (&tcctx, '\0', sizeof (tcctx));
12103 tcctx.cb.src_fn = ctx->cb.src_fn;
12104 tcctx.cb.dst_fn = child_fn;
12105 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
12106 gcc_checking_assert (tcctx.cb.src_node);
12107 tcctx.cb.dst_node = tcctx.cb.src_node;
12108 tcctx.cb.src_cfun = ctx->cb.src_cfun;
12109 tcctx.cb.copy_decl = task_copyfn_copy_decl;
12110 tcctx.cb.eh_lp_nr = 0;
12111 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
12112 tcctx.cb.decl_map = new hash_map<tree, tree>;
12113 tcctx.ctx = ctx;
12115 if (record_needs_remap)
12116 record_type = task_copyfn_remap_type (&tcctx, record_type);
12117 if (srecord_needs_remap)
12118 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
12120 else
12121 tcctx.cb.decl_map = NULL;
12123 arg = DECL_ARGUMENTS (child_fn);
12124 TREE_TYPE (arg) = build_pointer_type (record_type);
12125 sarg = DECL_CHAIN (arg);
12126 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
12128 /* First pass: initialize temporaries used in record_type and srecord_type
12129 sizes and field offsets. */
12130 if (tcctx.cb.decl_map)
12131 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12132 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12134 tree *p;
12136 decl = OMP_CLAUSE_DECL (c);
12137 p = tcctx.cb.decl_map->get (decl);
12138 if (p == NULL)
12139 continue;
12140 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12141 sf = (tree) n->value;
12142 sf = *tcctx.cb.decl_map->get (sf);
12143 src = build_simple_mem_ref_loc (loc, sarg);
12144 src = omp_build_component_ref (src, sf);
12145 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
12146 append_to_statement_list (t, &list);
12149 /* Second pass: copy shared var pointers and copy construct non-VLA
12150 firstprivate vars. */
12151 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12152 switch (OMP_CLAUSE_CODE (c))
12154 splay_tree_key key;
12155 case OMP_CLAUSE_SHARED:
12156 decl = OMP_CLAUSE_DECL (c);
12157 key = (splay_tree_key) decl;
12158 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
12159 key = (splay_tree_key) &DECL_UID (decl);
12160 n = splay_tree_lookup (ctx->field_map, key);
12161 if (n == NULL)
12162 break;
12163 f = (tree) n->value;
12164 if (tcctx.cb.decl_map)
12165 f = *tcctx.cb.decl_map->get (f);
12166 n = splay_tree_lookup (ctx->sfield_map, key);
12167 sf = (tree) n->value;
12168 if (tcctx.cb.decl_map)
12169 sf = *tcctx.cb.decl_map->get (sf);
12170 src = build_simple_mem_ref_loc (loc, sarg);
12171 src = omp_build_component_ref (src, sf);
12172 dst = build_simple_mem_ref_loc (loc, arg);
12173 dst = omp_build_component_ref (dst, f);
12174 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12175 append_to_statement_list (t, &list);
12176 break;
12177 case OMP_CLAUSE_REDUCTION:
12178 case OMP_CLAUSE_IN_REDUCTION:
12179 decl = OMP_CLAUSE_DECL (c);
12180 if (TREE_CODE (decl) == MEM_REF)
12182 decl = TREE_OPERAND (decl, 0);
12183 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12184 decl = TREE_OPERAND (decl, 0);
12185 if (TREE_CODE (decl) == INDIRECT_REF
12186 || TREE_CODE (decl) == ADDR_EXPR)
12187 decl = TREE_OPERAND (decl, 0);
12189 key = (splay_tree_key) decl;
12190 n = splay_tree_lookup (ctx->field_map, key);
12191 if (n == NULL)
12192 break;
12193 f = (tree) n->value;
12194 if (tcctx.cb.decl_map)
12195 f = *tcctx.cb.decl_map->get (f);
12196 n = splay_tree_lookup (ctx->sfield_map, key);
12197 sf = (tree) n->value;
12198 if (tcctx.cb.decl_map)
12199 sf = *tcctx.cb.decl_map->get (sf);
12200 src = build_simple_mem_ref_loc (loc, sarg);
12201 src = omp_build_component_ref (src, sf);
12202 if (decl != OMP_CLAUSE_DECL (c)
12203 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12204 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12205 src = build_simple_mem_ref_loc (loc, src);
12206 dst = build_simple_mem_ref_loc (loc, arg);
12207 dst = omp_build_component_ref (dst, f);
12208 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12209 append_to_statement_list (t, &list);
12210 break;
12211 case OMP_CLAUSE__LOOPTEMP_:
12212 /* Fields for first two _looptemp_ clauses are initialized by
12213 GOMP_taskloop*, the rest are handled like firstprivate. */
12214 if (looptempno < 2)
12216 looptempno++;
12217 break;
12219 /* FALLTHRU */
12220 case OMP_CLAUSE__REDUCTEMP_:
12221 case OMP_CLAUSE_FIRSTPRIVATE:
12222 decl = OMP_CLAUSE_DECL (c);
12223 if (is_variable_sized (decl))
12224 break;
12225 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12226 if (n == NULL)
12227 break;
12228 f = (tree) n->value;
12229 if (tcctx.cb.decl_map)
12230 f = *tcctx.cb.decl_map->get (f);
12231 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12232 if (n != NULL)
12234 sf = (tree) n->value;
12235 if (tcctx.cb.decl_map)
12236 sf = *tcctx.cb.decl_map->get (sf);
12237 src = build_simple_mem_ref_loc (loc, sarg);
12238 src = omp_build_component_ref (src, sf);
12239 if (use_pointer_for_field (decl, NULL)
12240 || omp_privatize_by_reference (decl))
12241 src = build_simple_mem_ref_loc (loc, src);
12243 else
12244 src = decl;
12245 dst = build_simple_mem_ref_loc (loc, arg);
12246 dst = omp_build_component_ref (dst, f);
12247 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
12248 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12249 else
12251 if (ctx->allocate_map)
12252 if (tree *allocatorp = ctx->allocate_map->get (decl))
12254 tree allocator = *allocatorp;
12255 HOST_WIDE_INT ialign = 0;
12256 if (TREE_CODE (allocator) == TREE_LIST)
12258 ialign = tree_to_uhwi (TREE_VALUE (allocator));
12259 allocator = TREE_PURPOSE (allocator);
12261 if (TREE_CODE (allocator) != INTEGER_CST)
12263 n = splay_tree_lookup (ctx->sfield_map,
12264 (splay_tree_key) allocator);
12265 allocator = (tree) n->value;
12266 if (tcctx.cb.decl_map)
12267 allocator = *tcctx.cb.decl_map->get (allocator);
12268 tree a = build_simple_mem_ref_loc (loc, sarg);
12269 allocator = omp_build_component_ref (a, allocator);
12271 allocator = fold_convert (pointer_sized_int_node, allocator);
12272 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
12273 tree align = build_int_cst (size_type_node,
12274 MAX (ialign,
12275 DECL_ALIGN_UNIT (decl)));
12276 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
12277 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
12278 allocator);
12279 ptr = fold_convert (TREE_TYPE (dst), ptr);
12280 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
12281 append_to_statement_list (t, &list);
12282 dst = build_simple_mem_ref_loc (loc, dst);
12284 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12286 append_to_statement_list (t, &list);
12287 break;
12288 case OMP_CLAUSE_PRIVATE:
12289 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
12290 break;
12291 decl = OMP_CLAUSE_DECL (c);
12292 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12293 f = (tree) n->value;
12294 if (tcctx.cb.decl_map)
12295 f = *tcctx.cb.decl_map->get (f);
12296 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12297 if (n != NULL)
12299 sf = (tree) n->value;
12300 if (tcctx.cb.decl_map)
12301 sf = *tcctx.cb.decl_map->get (sf);
12302 src = build_simple_mem_ref_loc (loc, sarg);
12303 src = omp_build_component_ref (src, sf);
12304 if (use_pointer_for_field (decl, NULL))
12305 src = build_simple_mem_ref_loc (loc, src);
12307 else
12308 src = decl;
12309 dst = build_simple_mem_ref_loc (loc, arg);
12310 dst = omp_build_component_ref (dst, f);
12311 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12312 append_to_statement_list (t, &list);
12313 break;
12314 default:
12315 break;
12318 /* Last pass: handle VLA firstprivates. */
12319 if (tcctx.cb.decl_map)
12320 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12321 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12323 tree ind, ptr, df;
12325 decl = OMP_CLAUSE_DECL (c);
12326 if (!is_variable_sized (decl))
12327 continue;
12328 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12329 if (n == NULL)
12330 continue;
12331 f = (tree) n->value;
12332 f = *tcctx.cb.decl_map->get (f);
12333 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
12334 ind = DECL_VALUE_EXPR (decl);
12335 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
12336 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
12337 n = splay_tree_lookup (ctx->sfield_map,
12338 (splay_tree_key) TREE_OPERAND (ind, 0));
12339 sf = (tree) n->value;
12340 sf = *tcctx.cb.decl_map->get (sf);
12341 src = build_simple_mem_ref_loc (loc, sarg);
12342 src = omp_build_component_ref (src, sf);
12343 src = build_simple_mem_ref_loc (loc, src);
12344 dst = build_simple_mem_ref_loc (loc, arg);
12345 dst = omp_build_component_ref (dst, f);
12346 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12347 append_to_statement_list (t, &list);
12348 n = splay_tree_lookup (ctx->field_map,
12349 (splay_tree_key) TREE_OPERAND (ind, 0));
12350 df = (tree) n->value;
12351 df = *tcctx.cb.decl_map->get (df);
12352 ptr = build_simple_mem_ref_loc (loc, arg);
12353 ptr = omp_build_component_ref (ptr, df);
12354 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
12355 build_fold_addr_expr_loc (loc, dst));
12356 append_to_statement_list (t, &list);
12359 t = build1 (RETURN_EXPR, void_type_node, NULL);
12360 append_to_statement_list (t, &list);
12362 if (tcctx.cb.decl_map)
12363 delete tcctx.cb.decl_map;
12364 pop_gimplify_context (NULL);
12365 BIND_EXPR_BODY (bind) = list;
12366 pop_cfun ();
/* Lower the OMP_CLAUSE_DEPEND clauses in *PCLAUSES into the array-based
   form handed to the runtime: statements that build and fill the array
   are appended to *ISEQ, and a clobber ending the array's lifetime is
   appended to *OSEQ.  The original DEPEND clauses are left in place and
   a single OMP_CLAUSE_DEPEND_LAST clause whose decl is the address of
   the array is prepended to *PCLAUSES.  */
12369 static void
12370 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
12372 tree c, clauses;
12373 gimple *g;
/* cnt[0] = out/inout, cnt[1] = mutexinoutset, cnt[2] = in,
   cnt[3] = depobj, cnt[4] = inoutset.  idx starts at the number of
   header slots in the array: 2 for the simple layout, bumped to 5
   below when the extended layout is required.  */
12374 size_t cnt[5] = { 0, 0, 0, 0, 0 }, idx = 2, i;
12376 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
12377 gcc_assert (clauses);
/* First pass: count depend clauses per kind.  */
12378 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12379 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
12380 switch (OMP_CLAUSE_DEPEND_KIND (c))
12382 case OMP_CLAUSE_DEPEND_LAST:
12383 /* Lowering already done at gimplification. */
12384 return;
12385 case OMP_CLAUSE_DEPEND_IN:
12386 cnt[2]++;
12387 break;
12388 case OMP_CLAUSE_DEPEND_OUT:
12389 case OMP_CLAUSE_DEPEND_INOUT:
12390 cnt[0]++;
12391 break;
12392 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12393 cnt[1]++;
12394 break;
12395 case OMP_CLAUSE_DEPEND_DEPOBJ:
12396 cnt[3]++;
12397 break;
12398 case OMP_CLAUSE_DEPEND_INOUTSET:
12399 cnt[4]++;
12400 break;
12401 default:
12402 gcc_unreachable ();
/* Any mutexinoutset/depobj/inoutset clause forces the 5-slot header.  */
12404 if (cnt[1] || cnt[3] || cnt[4])
12405 idx = 5;
12406 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3] + cnt[4];
/* Each inoutset entry owns two extra slots at the tail of the array,
   allocated starting at inoutidx; hence the "+ 2 * cnt[4]" below.  */
12407 size_t inoutidx = total + idx;
12408 tree type = build_array_type_nelts (ptr_type_node, total + idx + 2 * cnt[4]);
12409 tree array = create_tmp_var (type);
12410 TREE_ADDRESSABLE (array) = 1;
12411 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12412 NULL_TREE);
/* Header: extended layout stores a 0 marker in slot 0 and the total in
   slot 1; simple layout stores the total in slot 0.  Then one count
   slot per kind group (three groups extended, one simple).  */
12413 if (idx == 5)
12415 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12416 gimple_seq_add_stmt (iseq, g);
12417 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12418 NULL_TREE);
12420 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12421 gimple_seq_add_stmt (iseq, g);
12422 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12424 r = build4 (ARRAY_REF, ptr_type_node, array,
12425 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12426 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12427 gimple_seq_add_stmt (iseq, g);
/* Second pass: emit the dependence addresses grouped by kind, in the
   fixed order out/inout, mutexinoutset, in, depobj, inoutset.  The
   clause list is rescanned once per non-empty group.  */
12429 for (i = 0; i < 5; i++)
12431 if (cnt[i] == 0)
12432 continue;
12433 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12434 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12435 continue;
12436 else
12438 switch (OMP_CLAUSE_DEPEND_KIND (c))
12440 case OMP_CLAUSE_DEPEND_IN:
12441 if (i != 2)
12442 continue;
12443 break;
12444 case OMP_CLAUSE_DEPEND_OUT:
12445 case OMP_CLAUSE_DEPEND_INOUT:
12446 if (i != 0)
12447 continue;
12448 break;
12449 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12450 if (i != 1)
12451 continue;
12452 break;
12453 case OMP_CLAUSE_DEPEND_DEPOBJ:
12454 if (i != 3)
12455 continue;
12456 break;
12457 case OMP_CLAUSE_DEPEND_INOUTSET:
12458 if (i != 4)
12459 continue;
12460 break;
12461 default:
12462 gcc_unreachable ();
12464 tree t = OMP_CLAUSE_DECL (c);
/* For inoutset, the main slot stores the address of this entry's pair
   of tail slots rather than the dependence address itself.  */
12465 if (i == 4)
12467 t = build4 (ARRAY_REF, ptr_type_node, array,
12468 size_int (inoutidx), NULL_TREE, NULL_TREE);
12469 t = build_fold_addr_expr (t);
12470 inoutidx += 2;
12472 t = fold_convert (ptr_type_node, t);
12473 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12474 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12475 NULL_TREE, NULL_TREE);
12476 g = gimple_build_assign (r, t);
12477 gimple_seq_add_stmt (iseq, g);
/* Third pass: fill the tail slot pairs for inoutset entries with the
   dependence address followed by the GOMP_DEPEND_INOUTSET tag.  */
12480 if (cnt[4])
12481 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12482 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12483 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_INOUTSET)
12485 tree t = OMP_CLAUSE_DECL (c);
12486 t = fold_convert (ptr_type_node, t);
12487 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12488 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12489 NULL_TREE, NULL_TREE);
12490 g = gimple_build_assign (r, t);
12491 gimple_seq_add_stmt (iseq, g);
12492 t = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
12493 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12494 NULL_TREE, NULL_TREE);
12495 g = gimple_build_assign (r, t);
12496 gimple_seq_add_stmt (iseq, g);
/* Prepend a DEPEND_LAST clause pointing at the array; this marks the
   depend lowering as done (see the early return above) and carries the
   array to the construct's expansion.  */
12499 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12500 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12501 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12502 OMP_CLAUSE_CHAIN (c) = *pclauses;
12503 *pclauses = c;
/* End the temporary array's lifetime after the construct.  */
12504 tree clobber = build_clobber (type);
12505 g = gimple_build_assign (array, clobber);
12506 gimple_seq_add_stmt (oseq, g);
12509 /* Lower the OpenMP parallel or task directive in the current statement
12510 in GSI_P. CTX holds context information for the directive. */
12512 static void
12513 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12515 tree clauses;
12516 tree child_fn, t;
12517 gimple *stmt = gsi_stmt (*gsi_p);
/* dep_bind, when created, becomes the outermost replacement bind and
   wraps depend-clause and task-reduction setup around BIND.  */
12518 gbind *par_bind, *bind, *dep_bind = NULL;
12519 gimple_seq par_body;
12520 location_t loc = gimple_location (stmt);
12522 clauses = gimple_omp_taskreg_clauses (stmt);
/* A "#pragma omp taskwait depend(...)"-style task has no body bind.  */
12523 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12524 && gimple_omp_task_taskwait_p (stmt))
12526 par_bind = NULL;
12527 par_body = NULL;
12529 else
12531 par_bind
12532 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
12533 par_body = gimple_bind_body (par_bind);
12535 child_fn = ctx->cb.dst_fn;
/* Detect a parallel whose body is a single worksharing construct and
   mark it combined (check_combined_parallel counts into ws_num).  */
12536 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
12537 && !gimple_omp_parallel_combined_p (stmt))
12539 struct walk_stmt_info wi;
12540 int ws_num = 0;
12542 memset (&wi, 0, sizeof (wi));
12543 wi.info = &ws_num;
12544 wi.val_only = true;
12545 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12546 if (ws_num == 1)
12547 gimple_omp_parallel_set_combined_p (stmt, true);
/* Lower depend clauses on a task; the generated setup/teardown goes
   into dep_ilist/dep_olist inside a fresh dep_bind.  */
12549 gimple_seq dep_ilist = NULL;
12550 gimple_seq dep_olist = NULL;
12551 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12552 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12554 push_gimplify_context ();
12555 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12556 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
12557 &dep_ilist, &dep_olist);
/* For a taskwait-with-depend task there is nothing else to lower:
   just wrap the stmt in dep_bind (if any) and return.  */
12560 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12561 && gimple_omp_task_taskwait_p (stmt))
12563 if (dep_bind)
12565 gsi_replace (gsi_p, dep_bind, true);
12566 gimple_bind_add_seq (dep_bind, dep_ilist);
12567 gimple_bind_add_stmt (dep_bind, stmt);
12568 gimple_bind_add_seq (dep_bind, dep_olist);
12569 pop_gimplify_context (dep_bind);
12571 return;
/* An srecord_type means firstprivate data must be copied into the
   task's data block by a separate copy function.  */
12574 if (ctx->srecord_type)
12575 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
/* Task reductions (taskloop reduction / parallel _reductemp_) also
   need setup/teardown sequences wrapped in dep_bind.  */
12577 gimple_seq tskred_ilist = NULL;
12578 gimple_seq tskred_olist = NULL;
12579 if ((is_task_ctx (ctx)
12580 && gimple_omp_task_taskloop_p (ctx->stmt)
12581 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
12582 OMP_CLAUSE_REDUCTION))
12583 || (is_parallel_ctx (ctx)
12584 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
12585 OMP_CLAUSE__REDUCTEMP_)))
12587 if (dep_bind == NULL)
12589 push_gimplify_context ();
12590 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12592 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
12593 : OMP_PARALLEL,
12594 gimple_omp_taskreg_clauses (ctx->stmt),
12595 &tskred_ilist, &tskred_olist);
12598 push_gimplify_context ();
/* Lower data-sharing clauses and the body itself.  */
12600 gimple_seq par_olist = NULL;
12601 gimple_seq par_ilist = NULL;
12602 gimple_seq par_rlist = NULL;
12603 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
12604 lower_omp (&par_body, ctx);
12605 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
12606 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
12608 /* Declare all the variables created by mapping and the variables
12609 declared in the scope of the parallel body. */
12610 record_vars_into (ctx->block_vars, child_fn)
12611 maybe_remove_omp_member_access_dummy_vars (par_bind);
12612 record_vars_into (gimple_bind_vars (par_bind), child_fn);
/* Create the sender variable (.omp_data_o) used to marshal shared and
   firstprivate data to the child function.  */
12614 if (ctx->record_type)
12616 ctx->sender_decl
12617 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12618 : ctx->record_type, ".omp_data_o");
12619 DECL_NAMELESS (ctx->sender_decl) = 1;
12620 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12621 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
12624 gimple_seq olist = NULL;
12625 gimple_seq ilist = NULL;
12626 lower_send_clauses (clauses, &ilist, &olist, ctx);
12627 lower_send_shared_vars (&ilist, &olist, ctx);
/* The sender is dead after the construct; clobber it.  */
12629 if (ctx->record_type)
12631 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12632 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12633 clobber));
12636 /* Once all the expansions are done, sequence all the different
12637 fragments inside gimple_omp_body. */
12639 gimple_seq new_body = NULL;
12641 if (ctx->record_type)
12643 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12644 /* fixup_child_record_type might have changed receiver_decl's type. */
12645 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12646 gimple_seq_add_stmt (&new_body,
12647 gimple_build_assign (ctx->receiver_decl, t));
/* Body order: input-clause setup, lowered body, reduction merges,
   cancel label (if cancellable), output-clause teardown.  */
12650 gimple_seq_add_seq (&new_body, par_ilist);
12651 gimple_seq_add_seq (&new_body, par_body);
12652 gimple_seq_add_seq (&new_body, par_rlist);
12653 if (ctx->cancellable)
12654 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
12655 gimple_seq_add_seq (&new_body, par_olist);
12656 new_body = maybe_catch_exception (new_body);
12657 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
12658 gimple_seq_add_stmt (&new_body,
12659 gimple_build_omp_continue (integer_zero_node,
12660 integer_zero_node));
12661 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12662 gimple_omp_set_body (stmt, new_body);
/* Replace the original statement with BIND (send setup, stmt, send
   teardown), itself nested inside DEP_BIND when that exists.  */
12664 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
12665 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12666 else
12667 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
12668 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12669 gimple_bind_add_seq (bind, ilist);
12670 gimple_bind_add_stmt (bind, stmt);
12671 gimple_bind_add_seq (bind, olist);
12673 pop_gimplify_context (NULL);
12675 if (dep_bind)
12677 gimple_bind_add_seq (dep_bind, dep_ilist);
12678 gimple_bind_add_seq (dep_bind, tskred_ilist);
12679 gimple_bind_add_stmt (dep_bind, bind);
12680 gimple_bind_add_seq (dep_bind, tskred_olist);
12681 gimple_bind_add_seq (dep_bind, dep_olist);
12682 pop_gimplify_context (dep_bind);
12686 /* Lower the GIMPLE_OMP_TARGET in the current statement
12687 in GSI_P. CTX holds context information for the directive. */
12689 static void
12690 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12692 tree clauses;
12693 tree child_fn, t, c;
12694 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12695 gbind *tgt_bind, *bind, *dep_bind = NULL;
12696 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12697 location_t loc = gimple_location (stmt);
12698 bool offloaded, data_region;
12699 unsigned int map_cnt = 0;
12700 tree in_reduction_clauses = NULL_TREE;
12702 offloaded = is_gimple_omp_offloaded (stmt);
12703 switch (gimple_omp_target_kind (stmt))
12705 case GF_OMP_TARGET_KIND_REGION:
12706 tree *p, *q;
12707 q = &in_reduction_clauses;
12708 for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
12709 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12711 *q = *p;
12712 q = &OMP_CLAUSE_CHAIN (*q);
12713 *p = OMP_CLAUSE_CHAIN (*p);
12715 else
12716 p = &OMP_CLAUSE_CHAIN (*p);
12717 *q = NULL_TREE;
12718 *p = in_reduction_clauses;
12719 /* FALLTHRU */
12720 case GF_OMP_TARGET_KIND_UPDATE:
12721 case GF_OMP_TARGET_KIND_ENTER_DATA:
12722 case GF_OMP_TARGET_KIND_EXIT_DATA:
12723 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12724 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12725 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12726 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12727 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12728 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12729 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12730 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12731 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12732 data_region = false;
12733 break;
12734 case GF_OMP_TARGET_KIND_DATA:
12735 case GF_OMP_TARGET_KIND_OACC_DATA:
12736 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12737 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12738 data_region = true;
12739 break;
12740 default:
12741 gcc_unreachable ();
12744 /* Ensure that requires map is written via output_offload_tables, even if only
12745 'target (enter/exit) data' is used in the translation unit. */
12746 if (ENABLE_OFFLOADING && (omp_requires_mask & OMP_REQUIRES_TARGET_USED))
12747 g->have_offload = true;
12749 clauses = gimple_omp_target_clauses (stmt);
12751 gimple_seq dep_ilist = NULL;
12752 gimple_seq dep_olist = NULL;
12753 bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
12754 if (has_depend || in_reduction_clauses)
12756 push_gimplify_context ();
12757 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12758 if (has_depend)
12759 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12760 &dep_ilist, &dep_olist);
12761 if (in_reduction_clauses)
12762 lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
12763 ctx, NULL);
12766 tgt_bind = NULL;
12767 tgt_body = NULL;
12768 if (offloaded)
12770 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12771 tgt_body = gimple_bind_body (tgt_bind);
12773 else if (data_region)
12774 tgt_body = gimple_omp_body (stmt);
12775 child_fn = ctx->cb.dst_fn;
12777 push_gimplify_context ();
12778 fplist = NULL;
12780 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12781 switch (OMP_CLAUSE_CODE (c))
12783 tree var, x;
12785 default:
12786 break;
12787 case OMP_CLAUSE_MAP:
12788 #if CHECKING_P
12789 /* First check what we're prepared to handle in the following. */
12790 switch (OMP_CLAUSE_MAP_KIND (c))
12792 case GOMP_MAP_ALLOC:
12793 case GOMP_MAP_TO:
12794 case GOMP_MAP_FROM:
12795 case GOMP_MAP_TOFROM:
12796 case GOMP_MAP_POINTER:
12797 case GOMP_MAP_TO_PSET:
12798 case GOMP_MAP_DELETE:
12799 case GOMP_MAP_RELEASE:
12800 case GOMP_MAP_ALWAYS_TO:
12801 case GOMP_MAP_ALWAYS_FROM:
12802 case GOMP_MAP_ALWAYS_TOFROM:
12803 case GOMP_MAP_FORCE_PRESENT:
12804 case GOMP_MAP_ALWAYS_PRESENT_FROM:
12805 case GOMP_MAP_ALWAYS_PRESENT_TO:
12806 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
12808 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12809 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12810 case GOMP_MAP_STRUCT:
12811 case GOMP_MAP_ALWAYS_POINTER:
12812 case GOMP_MAP_ATTACH:
12813 case GOMP_MAP_DETACH:
12814 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
12815 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
12816 break;
12817 case GOMP_MAP_IF_PRESENT:
12818 case GOMP_MAP_FORCE_ALLOC:
12819 case GOMP_MAP_FORCE_TO:
12820 case GOMP_MAP_FORCE_FROM:
12821 case GOMP_MAP_FORCE_TOFROM:
12822 case GOMP_MAP_FORCE_DEVICEPTR:
12823 case GOMP_MAP_DEVICE_RESIDENT:
12824 case GOMP_MAP_LINK:
12825 case GOMP_MAP_FORCE_DETACH:
12826 gcc_assert (is_gimple_omp_oacc (stmt));
12827 break;
12828 default:
12829 gcc_unreachable ();
12831 #endif
12832 /* FALLTHRU */
12833 case OMP_CLAUSE_TO:
12834 case OMP_CLAUSE_FROM:
12835 oacc_firstprivate:
12836 var = OMP_CLAUSE_DECL (c);
12837 if (!DECL_P (var))
12839 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12840 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12841 && (OMP_CLAUSE_MAP_KIND (c)
12842 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12843 map_cnt++;
12844 continue;
12847 if (DECL_SIZE (var)
12848 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12850 tree var2 = DECL_VALUE_EXPR (var);
12851 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12852 var2 = TREE_OPERAND (var2, 0);
12853 gcc_assert (DECL_P (var2));
12854 var = var2;
12857 if (offloaded
12858 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12859 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12860 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12862 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12864 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12865 && varpool_node::get_create (var)->offloadable)
12866 continue;
12868 tree type = build_pointer_type (TREE_TYPE (var));
12869 tree new_var = lookup_decl (var, ctx);
12870 x = create_tmp_var_raw (type, get_name (new_var));
12871 gimple_add_tmp_var (x);
12872 x = build_simple_mem_ref (x);
12873 SET_DECL_VALUE_EXPR (new_var, x);
12874 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12876 continue;
12879 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12880 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12881 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12882 && is_omp_target (stmt))
12884 gcc_assert (maybe_lookup_field (c, ctx));
12885 map_cnt++;
12886 continue;
12889 if (!maybe_lookup_field (var, ctx))
12890 continue;
12892 /* Don't remap compute constructs' reduction variables, because the
12893 intermediate result must be local to each gang. */
12894 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12895 && is_gimple_omp_oacc (ctx->stmt)
12896 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12898 x = build_receiver_ref (var, true, ctx);
12899 tree new_var = lookup_decl (var, ctx);
12901 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12902 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12903 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12904 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12905 x = build_simple_mem_ref (x);
12906 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12908 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12909 if (omp_privatize_by_reference (new_var)
12910 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12911 || DECL_BY_REFERENCE (var)))
12913 /* Create a local object to hold the instance
12914 value. */
12915 tree type = TREE_TYPE (TREE_TYPE (new_var));
12916 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12917 tree inst = create_tmp_var (type, id);
12918 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12919 x = build_fold_addr_expr (inst);
12921 gimplify_assign (new_var, x, &fplist);
12923 else if (DECL_P (new_var))
12925 SET_DECL_VALUE_EXPR (new_var, x);
12926 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12928 else
12929 gcc_unreachable ();
12931 map_cnt++;
12932 break;
12934 case OMP_CLAUSE_FIRSTPRIVATE:
12935 omp_firstprivate_recv:
12936 gcc_checking_assert (offloaded);
12937 if (is_gimple_omp_oacc (ctx->stmt))
12939 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12940 gcc_checking_assert (!is_oacc_kernels (ctx));
12941 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12942 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12944 goto oacc_firstprivate;
12946 map_cnt++;
12947 var = OMP_CLAUSE_DECL (c);
12948 if (!omp_privatize_by_reference (var)
12949 && !is_gimple_reg_type (TREE_TYPE (var)))
12951 tree new_var = lookup_decl (var, ctx);
12952 if (is_variable_sized (var))
12954 tree pvar = DECL_VALUE_EXPR (var);
12955 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12956 pvar = TREE_OPERAND (pvar, 0);
12957 gcc_assert (DECL_P (pvar));
12958 tree new_pvar = lookup_decl (pvar, ctx);
12959 x = build_fold_indirect_ref (new_pvar);
12960 TREE_THIS_NOTRAP (x) = 1;
12962 else
12963 x = build_receiver_ref (var, true, ctx);
12964 SET_DECL_VALUE_EXPR (new_var, x);
12965 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12967 /* Fortran array descriptors: firstprivate of data + attach. */
12968 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
12969 && lang_hooks.decls.omp_array_data (var, true))
12970 map_cnt += 2;
12971 break;
12973 case OMP_CLAUSE_PRIVATE:
12974 gcc_checking_assert (offloaded);
12975 if (is_gimple_omp_oacc (ctx->stmt))
12977 /* No 'private' clauses on OpenACC 'kernels'. */
12978 gcc_checking_assert (!is_oacc_kernels (ctx));
12979 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12980 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12982 break;
12984 var = OMP_CLAUSE_DECL (c);
12985 if (is_variable_sized (var))
12987 tree new_var = lookup_decl (var, ctx);
12988 tree pvar = DECL_VALUE_EXPR (var);
12989 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12990 pvar = TREE_OPERAND (pvar, 0);
12991 gcc_assert (DECL_P (pvar));
12992 tree new_pvar = lookup_decl (pvar, ctx);
12993 x = build_fold_indirect_ref (new_pvar);
12994 TREE_THIS_NOTRAP (x) = 1;
12995 SET_DECL_VALUE_EXPR (new_var, x);
12996 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12998 break;
13000 case OMP_CLAUSE_USE_DEVICE_PTR:
13001 case OMP_CLAUSE_USE_DEVICE_ADDR:
13002 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13003 case OMP_CLAUSE_IS_DEVICE_PTR:
13004 var = OMP_CLAUSE_DECL (c);
13005 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13007 while (TREE_CODE (var) == INDIRECT_REF
13008 || TREE_CODE (var) == ARRAY_REF)
13009 var = TREE_OPERAND (var, 0);
13010 if (lang_hooks.decls.omp_array_data (var, true))
13011 goto omp_firstprivate_recv;
13013 map_cnt++;
13014 if (is_variable_sized (var))
13016 tree new_var = lookup_decl (var, ctx);
13017 tree pvar = DECL_VALUE_EXPR (var);
13018 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13019 pvar = TREE_OPERAND (pvar, 0);
13020 gcc_assert (DECL_P (pvar));
13021 tree new_pvar = lookup_decl (pvar, ctx);
13022 x = build_fold_indirect_ref (new_pvar);
13023 TREE_THIS_NOTRAP (x) = 1;
13024 SET_DECL_VALUE_EXPR (new_var, x);
13025 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
13027 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13028 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13029 && !omp_privatize_by_reference (var)
13030 && !omp_is_allocatable_or_ptr (var)
13031 && !lang_hooks.decls.omp_array_data (var, true))
13032 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13034 tree new_var = lookup_decl (var, ctx);
13035 tree type = build_pointer_type (TREE_TYPE (var));
13036 x = create_tmp_var_raw (type, get_name (new_var));
13037 gimple_add_tmp_var (x);
13038 x = build_simple_mem_ref (x);
13039 SET_DECL_VALUE_EXPR (new_var, x);
13040 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
13042 else
13044 tree new_var = lookup_decl (var, ctx);
13045 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
13046 gimple_add_tmp_var (x);
13047 SET_DECL_VALUE_EXPR (new_var, x);
13048 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
13050 break;
13053 if (offloaded)
13055 target_nesting_level++;
13056 lower_omp (&tgt_body, ctx);
13057 target_nesting_level--;
13059 else if (data_region)
13060 lower_omp (&tgt_body, ctx);
13062 if (offloaded)
13064 /* Declare all the variables created by mapping and the variables
13065 declared in the scope of the target body. */
13066 record_vars_into (ctx->block_vars, child_fn);
13067 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
13068 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
13071 olist = NULL;
13072 ilist = NULL;
13073 if (ctx->record_type)
13075 ctx->sender_decl
13076 = create_tmp_var (ctx->record_type, ".omp_data_arr");
13077 DECL_NAMELESS (ctx->sender_decl) = 1;
13078 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
13079 t = make_tree_vec (3);
13080 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
13081 TREE_VEC_ELT (t, 1)
13082 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
13083 ".omp_data_sizes");
13084 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
13085 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
13086 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
13087 tree tkind_type = short_unsigned_type_node;
13088 int talign_shift = 8;
13089 TREE_VEC_ELT (t, 2)
13090 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
13091 ".omp_data_kinds");
13092 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
13093 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
13094 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
13095 gimple_omp_target_set_data_arg (stmt, t);
13097 vec<constructor_elt, va_gc> *vsize;
13098 vec<constructor_elt, va_gc> *vkind;
13099 vec_alloc (vsize, map_cnt);
13100 vec_alloc (vkind, map_cnt);
13101 unsigned int map_idx = 0;
13103 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13104 switch (OMP_CLAUSE_CODE (c))
13106 tree ovar, nc, s, purpose, var, x, type;
13107 unsigned int talign;
13109 default:
13110 break;
13112 case OMP_CLAUSE_MAP:
13113 case OMP_CLAUSE_TO:
13114 case OMP_CLAUSE_FROM:
13115 oacc_firstprivate_map:
13116 nc = c;
13117 ovar = OMP_CLAUSE_DECL (c);
13118 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13119 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13120 || (OMP_CLAUSE_MAP_KIND (c)
13121 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13122 break;
13123 if (!DECL_P (ovar))
13125 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13126 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
13128 nc = OMP_CLAUSE_CHAIN (c);
13129 gcc_checking_assert (OMP_CLAUSE_DECL (nc)
13130 == get_base_address (ovar));
13131 ovar = OMP_CLAUSE_DECL (nc);
13133 else
13135 tree x = build_sender_ref (ovar, ctx);
13136 tree v = ovar;
13137 if (in_reduction_clauses
13138 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13139 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13141 v = unshare_expr (v);
13142 tree *p = &v;
13143 while (handled_component_p (*p)
13144 || TREE_CODE (*p) == INDIRECT_REF
13145 || TREE_CODE (*p) == ADDR_EXPR
13146 || TREE_CODE (*p) == MEM_REF
13147 || TREE_CODE (*p) == NON_LVALUE_EXPR)
13148 p = &TREE_OPERAND (*p, 0);
13149 tree d = *p;
13150 if (is_variable_sized (d))
13152 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13153 d = DECL_VALUE_EXPR (d);
13154 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13155 d = TREE_OPERAND (d, 0);
13156 gcc_assert (DECL_P (d));
13158 splay_tree_key key
13159 = (splay_tree_key) &DECL_CONTEXT (d);
13160 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13161 key)->value;
13162 if (d == *p)
13163 *p = nd;
13164 else
13165 *p = build_fold_indirect_ref (nd);
13167 v = build_fold_addr_expr_with_type (v, ptr_type_node);
13168 gimplify_assign (x, v, &ilist);
13169 nc = NULL_TREE;
13172 else
13174 if (DECL_SIZE (ovar)
13175 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
13177 tree ovar2 = DECL_VALUE_EXPR (ovar);
13178 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
13179 ovar2 = TREE_OPERAND (ovar2, 0);
13180 gcc_assert (DECL_P (ovar2));
13181 ovar = ovar2;
13183 if (!maybe_lookup_field (ovar, ctx)
13184 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13185 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13186 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
13187 continue;
13190 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
13191 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
13192 talign = DECL_ALIGN_UNIT (ovar);
13194 var = NULL_TREE;
13195 if (nc)
13197 if (in_reduction_clauses
13198 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13199 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13201 tree d = ovar;
13202 if (is_variable_sized (d))
13204 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13205 d = DECL_VALUE_EXPR (d);
13206 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13207 d = TREE_OPERAND (d, 0);
13208 gcc_assert (DECL_P (d));
13210 splay_tree_key key
13211 = (splay_tree_key) &DECL_CONTEXT (d);
13212 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13213 key)->value;
13214 if (d == ovar)
13215 var = nd;
13216 else
13217 var = build_fold_indirect_ref (nd);
13219 else
13220 var = lookup_decl_in_outer_ctx (ovar, ctx);
13222 if (nc
13223 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13224 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13225 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
13226 && is_omp_target (stmt))
13228 x = build_sender_ref (c, ctx);
13229 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
13231 else if (nc)
13233 x = build_sender_ref (ovar, ctx);
13235 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13236 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
13237 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
13238 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
13240 gcc_assert (offloaded);
13241 tree avar
13242 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
13243 mark_addressable (avar);
13244 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
13245 talign = DECL_ALIGN_UNIT (avar);
13246 avar = build_fold_addr_expr (avar);
13247 gimplify_assign (x, avar, &ilist);
13249 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13251 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
13252 if (!omp_privatize_by_reference (var))
13254 if (is_gimple_reg (var)
13255 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13256 suppress_warning (var);
13257 var = build_fold_addr_expr (var);
13259 else
13260 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13261 gimplify_assign (x, var, &ilist);
13263 else if (is_gimple_reg (var))
13265 gcc_assert (offloaded);
13266 tree avar = create_tmp_var (TREE_TYPE (var));
13267 mark_addressable (avar);
13268 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
13269 if (GOMP_MAP_COPY_TO_P (map_kind)
13270 || map_kind == GOMP_MAP_POINTER
13271 || map_kind == GOMP_MAP_TO_PSET
13272 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13274 /* If we need to initialize a temporary
13275 with VAR because it is not addressable, and
13276 the variable hasn't been initialized yet, then
13277 we'll get a warning for the store to avar.
13278 Don't warn in that case, the mapping might
13279 be implicit. */
13280 suppress_warning (var, OPT_Wuninitialized);
13281 gimplify_assign (avar, var, &ilist);
13283 avar = build_fold_addr_expr (avar);
13284 gimplify_assign (x, avar, &ilist);
13285 if ((GOMP_MAP_COPY_FROM_P (map_kind)
13286 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13287 && !TYPE_READONLY (TREE_TYPE (var)))
13289 x = unshare_expr (x);
13290 x = build_simple_mem_ref (x);
13291 gimplify_assign (var, x, &olist);
13294 else
13296 /* While MAP is handled explicitly by the FE,
13297 for 'target update', only the identified is passed. */
13298 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
13299 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
13300 && (omp_is_allocatable_or_ptr (var)
13301 && omp_check_optional_argument (var, false)))
13302 var = build_fold_indirect_ref (var);
13303 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
13304 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
13305 || (!omp_is_allocatable_or_ptr (var)
13306 && !omp_check_optional_argument (var, false)))
13307 var = build_fold_addr_expr (var);
13308 gimplify_assign (x, var, &ilist);
13311 s = NULL_TREE;
13312 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13314 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13315 s = TREE_TYPE (ovar);
13316 if (TREE_CODE (s) == REFERENCE_TYPE
13317 || omp_check_optional_argument (ovar, false))
13318 s = TREE_TYPE (s);
13319 s = TYPE_SIZE_UNIT (s);
13321 else
13322 s = OMP_CLAUSE_SIZE (c);
13323 if (s == NULL_TREE)
13324 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13325 s = fold_convert (size_type_node, s);
13326 purpose = size_int (map_idx++);
13327 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13328 if (TREE_CODE (s) != INTEGER_CST)
13329 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13331 unsigned HOST_WIDE_INT tkind, tkind_zero;
13332 switch (OMP_CLAUSE_CODE (c))
13334 case OMP_CLAUSE_MAP:
13335 tkind = OMP_CLAUSE_MAP_KIND (c);
13336 tkind_zero = tkind;
13337 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
13338 switch (tkind)
13340 case GOMP_MAP_ALLOC:
13341 case GOMP_MAP_IF_PRESENT:
13342 case GOMP_MAP_TO:
13343 case GOMP_MAP_FROM:
13344 case GOMP_MAP_TOFROM:
13345 case GOMP_MAP_ALWAYS_TO:
13346 case GOMP_MAP_ALWAYS_FROM:
13347 case GOMP_MAP_ALWAYS_TOFROM:
13348 case GOMP_MAP_ALWAYS_PRESENT_TO:
13349 case GOMP_MAP_ALWAYS_PRESENT_FROM:
13350 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
13351 case GOMP_MAP_RELEASE:
13352 case GOMP_MAP_FORCE_TO:
13353 case GOMP_MAP_FORCE_FROM:
13354 case GOMP_MAP_FORCE_TOFROM:
13355 case GOMP_MAP_FORCE_PRESENT:
13356 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
13357 break;
13358 case GOMP_MAP_DELETE:
13359 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
13360 default:
13361 break;
13363 if (tkind_zero != tkind)
13365 if (integer_zerop (s))
13366 tkind = tkind_zero;
13367 else if (integer_nonzerop (s))
13368 tkind_zero = tkind;
13370 if (tkind_zero == tkind
13371 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c)
13372 && (((tkind & GOMP_MAP_FLAG_SPECIAL_BITS)
13373 & ~GOMP_MAP_IMPLICIT)
13374 == 0))
13376 /* If this is an implicit map, and the GOMP_MAP_IMPLICIT
13377 bits are not interfered by other special bit encodings,
13378 then turn the GOMP_IMPLICIT_BIT flag on for the runtime
13379 to see. */
13380 tkind |= GOMP_MAP_IMPLICIT;
13381 tkind_zero = tkind;
13383 break;
13384 case OMP_CLAUSE_FIRSTPRIVATE:
13385 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13386 tkind = GOMP_MAP_TO;
13387 tkind_zero = tkind;
13388 break;
13389 case OMP_CLAUSE_TO:
13390 tkind
13391 = (OMP_CLAUSE_MOTION_PRESENT (c)
13392 ? GOMP_MAP_ALWAYS_PRESENT_TO : GOMP_MAP_TO);
13393 tkind_zero = tkind;
13394 break;
13395 case OMP_CLAUSE_FROM:
13396 tkind
13397 = (OMP_CLAUSE_MOTION_PRESENT (c)
13398 ? GOMP_MAP_ALWAYS_PRESENT_FROM : GOMP_MAP_FROM);
13399 tkind_zero = tkind;
13400 break;
13401 default:
13402 gcc_unreachable ();
13404 gcc_checking_assert (tkind
13405 < (HOST_WIDE_INT_C (1U) << talign_shift));
13406 gcc_checking_assert (tkind_zero
13407 < (HOST_WIDE_INT_C (1U) << talign_shift));
13408 talign = ceil_log2 (talign);
13409 tkind |= talign << talign_shift;
13410 tkind_zero |= talign << talign_shift;
13411 gcc_checking_assert (tkind
13412 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13413 gcc_checking_assert (tkind_zero
13414 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13415 if (tkind == tkind_zero)
13416 x = build_int_cstu (tkind_type, tkind);
13417 else
13419 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
13420 x = build3 (COND_EXPR, tkind_type,
13421 fold_build2 (EQ_EXPR, boolean_type_node,
13422 unshare_expr (s), size_zero_node),
13423 build_int_cstu (tkind_type, tkind_zero),
13424 build_int_cstu (tkind_type, tkind));
13426 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
13427 if (nc && nc != c)
13428 c = nc;
13429 break;
13431 case OMP_CLAUSE_FIRSTPRIVATE:
13432 omp_has_device_addr_descr:
13433 if (is_gimple_omp_oacc (ctx->stmt))
13434 goto oacc_firstprivate_map;
13435 ovar = OMP_CLAUSE_DECL (c);
13436 if (omp_privatize_by_reference (ovar))
13437 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13438 else
13439 talign = DECL_ALIGN_UNIT (ovar);
13440 var = lookup_decl_in_outer_ctx (ovar, ctx);
13441 x = build_sender_ref (ovar, ctx);
13442 tkind = GOMP_MAP_FIRSTPRIVATE;
13443 type = TREE_TYPE (ovar);
13444 if (omp_privatize_by_reference (ovar))
13445 type = TREE_TYPE (type);
13446 if ((INTEGRAL_TYPE_P (type)
13447 && TYPE_PRECISION (type) <= POINTER_SIZE)
13448 || TREE_CODE (type) == POINTER_TYPE)
13450 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13451 tree t = var;
13452 if (omp_privatize_by_reference (var))
13453 t = build_simple_mem_ref (var);
13454 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13455 suppress_warning (var);
13456 if (TREE_CODE (type) != POINTER_TYPE)
13457 t = fold_convert (pointer_sized_int_node, t);
13458 t = fold_convert (TREE_TYPE (x), t);
13459 gimplify_assign (x, t, &ilist);
13461 else if (omp_privatize_by_reference (var))
13462 gimplify_assign (x, var, &ilist);
13463 else if (is_gimple_reg (var))
13465 tree avar = create_tmp_var (TREE_TYPE (var));
13466 mark_addressable (avar);
13467 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13468 suppress_warning (var);
13469 gimplify_assign (avar, var, &ilist);
13470 avar = build_fold_addr_expr (avar);
13471 gimplify_assign (x, avar, &ilist);
13473 else
13475 var = build_fold_addr_expr (var);
13476 gimplify_assign (x, var, &ilist);
13478 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13479 s = size_int (0);
13480 else if (omp_privatize_by_reference (ovar))
13481 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13482 else
13483 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13484 s = fold_convert (size_type_node, s);
13485 purpose = size_int (map_idx++);
13486 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13487 if (TREE_CODE (s) != INTEGER_CST)
13488 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13490 gcc_checking_assert (tkind
13491 < (HOST_WIDE_INT_C (1U) << talign_shift));
13492 talign = ceil_log2 (talign);
13493 tkind |= talign << talign_shift;
13494 gcc_checking_assert (tkind
13495 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13496 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13497 build_int_cstu (tkind_type, tkind));
13498 /* Fortran array descriptors: firstprivate of data + attach. */
13499 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13500 && lang_hooks.decls.omp_array_data (ovar, true))
13502 tree not_null_lb, null_lb, after_lb;
13503 tree var1, var2, size1, size2;
13504 tree present = omp_check_optional_argument (ovar, true);
13505 if (present)
13507 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13508 not_null_lb = create_artificial_label (clause_loc);
13509 null_lb = create_artificial_label (clause_loc);
13510 after_lb = create_artificial_label (clause_loc);
13511 gimple_seq seq = NULL;
13512 present = force_gimple_operand (present, &seq, true,
13513 NULL_TREE);
13514 gimple_seq_add_seq (&ilist, seq);
13515 gimple_seq_add_stmt (&ilist,
13516 gimple_build_cond_from_tree (present,
13517 not_null_lb, null_lb));
13518 gimple_seq_add_stmt (&ilist,
13519 gimple_build_label (not_null_lb));
13521 var1 = lang_hooks.decls.omp_array_data (var, false);
13522 size1 = lang_hooks.decls.omp_array_size (var, &ilist);
13523 var2 = build_fold_addr_expr (x);
13524 if (!POINTER_TYPE_P (TREE_TYPE (var)))
13525 var = build_fold_addr_expr (var);
13526 size2 = fold_build2 (POINTER_DIFF_EXPR, ssizetype,
13527 build_fold_addr_expr (var1), var);
13528 size2 = fold_convert (sizetype, size2);
13529 if (present)
13531 tree tmp = create_tmp_var (TREE_TYPE (var1));
13532 gimplify_assign (tmp, var1, &ilist);
13533 var1 = tmp;
13534 tmp = create_tmp_var (TREE_TYPE (var2));
13535 gimplify_assign (tmp, var2, &ilist);
13536 var2 = tmp;
13537 tmp = create_tmp_var (TREE_TYPE (size1));
13538 gimplify_assign (tmp, size1, &ilist);
13539 size1 = tmp;
13540 tmp = create_tmp_var (TREE_TYPE (size2));
13541 gimplify_assign (tmp, size2, &ilist);
13542 size2 = tmp;
13543 gimple_seq_add_stmt (&ilist, gimple_build_goto (after_lb));
13544 gimple_seq_add_stmt (&ilist, gimple_build_label (null_lb));
13545 gimplify_assign (var1, null_pointer_node, &ilist);
13546 gimplify_assign (var2, null_pointer_node, &ilist);
13547 gimplify_assign (size1, size_zero_node, &ilist);
13548 gimplify_assign (size2, size_zero_node, &ilist);
13549 gimple_seq_add_stmt (&ilist, gimple_build_label (after_lb));
13551 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13552 gimplify_assign (x, var1, &ilist);
13553 tkind = GOMP_MAP_FIRSTPRIVATE;
13554 talign = DECL_ALIGN_UNIT (ovar);
13555 talign = ceil_log2 (talign);
13556 tkind |= talign << talign_shift;
13557 gcc_checking_assert (tkind
13558 <= tree_to_uhwi (
13559 TYPE_MAX_VALUE (tkind_type)));
13560 purpose = size_int (map_idx++);
13561 CONSTRUCTOR_APPEND_ELT (vsize, purpose, size1);
13562 if (TREE_CODE (size1) != INTEGER_CST)
13563 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13564 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13565 build_int_cstu (tkind_type, tkind));
13566 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13567 gimplify_assign (x, var2, &ilist);
13568 tkind = GOMP_MAP_ATTACH;
13569 purpose = size_int (map_idx++);
13570 CONSTRUCTOR_APPEND_ELT (vsize, purpose, size2);
13571 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13572 build_int_cstu (tkind_type, tkind));
13574 break;
13576 case OMP_CLAUSE_USE_DEVICE_PTR:
13577 case OMP_CLAUSE_USE_DEVICE_ADDR:
13578 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13579 case OMP_CLAUSE_IS_DEVICE_PTR:
13580 ovar = OMP_CLAUSE_DECL (c);
13581 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13583 if (lang_hooks.decls.omp_array_data (ovar, true))
13584 goto omp_has_device_addr_descr;
13585 while (TREE_CODE (ovar) == INDIRECT_REF
13586 || TREE_CODE (ovar) == ARRAY_REF)
13587 ovar = TREE_OPERAND (ovar, 0);
13589 var = lookup_decl_in_outer_ctx (ovar, ctx);
13591 if (lang_hooks.decls.omp_array_data (ovar, true))
13593 tkind = ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13594 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13595 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13596 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13598 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13599 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13601 tkind = GOMP_MAP_USE_DEVICE_PTR;
13602 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13604 else
13606 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13607 x = build_sender_ref (ovar, ctx);
13610 if (is_gimple_omp_oacc (ctx->stmt))
13612 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13614 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13615 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13618 type = TREE_TYPE (ovar);
13619 if (lang_hooks.decls.omp_array_data (ovar, true))
13620 var = lang_hooks.decls.omp_array_data (var, false);
13621 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13622 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13623 && !omp_privatize_by_reference (ovar)
13624 && !omp_is_allocatable_or_ptr (ovar))
13625 || TREE_CODE (type) == ARRAY_TYPE)
13626 var = build_fold_addr_expr (var);
13627 else
13629 if (omp_privatize_by_reference (ovar)
13630 || omp_check_optional_argument (ovar, false)
13631 || omp_is_allocatable_or_ptr (ovar))
13633 type = TREE_TYPE (type);
13634 if (POINTER_TYPE_P (type)
13635 && TREE_CODE (type) != ARRAY_TYPE
13636 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13637 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13638 && !omp_is_allocatable_or_ptr (ovar))
13639 || (omp_privatize_by_reference (ovar)
13640 && omp_is_allocatable_or_ptr (ovar))))
13641 var = build_simple_mem_ref (var);
13642 var = fold_convert (TREE_TYPE (x), var);
13645 tree present;
13646 present = omp_check_optional_argument (ovar, true);
13647 if (present)
13649 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13650 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13651 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13652 tree new_x = unshare_expr (x);
13653 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13654 fb_rvalue);
13655 gcond *cond = gimple_build_cond_from_tree (present,
13656 notnull_label,
13657 null_label);
13658 gimple_seq_add_stmt (&ilist, cond);
13659 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
13660 gimplify_assign (new_x, null_pointer_node, &ilist);
13661 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
13662 gimple_seq_add_stmt (&ilist,
13663 gimple_build_label (notnull_label));
13664 gimplify_assign (x, var, &ilist);
13665 gimple_seq_add_stmt (&ilist,
13666 gimple_build_label (opt_arg_label));
13668 else
13669 gimplify_assign (x, var, &ilist);
13670 s = size_int (0);
13671 purpose = size_int (map_idx++);
13672 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13673 gcc_checking_assert (tkind
13674 < (HOST_WIDE_INT_C (1U) << talign_shift));
13675 gcc_checking_assert (tkind
13676 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13677 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13678 build_int_cstu (tkind_type, tkind));
13679 break;
13682 gcc_assert (map_idx == map_cnt);
13684 DECL_INITIAL (TREE_VEC_ELT (t, 1))
13685 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13686 DECL_INITIAL (TREE_VEC_ELT (t, 2))
13687 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
13688 for (int i = 1; i <= 2; i++)
13689 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
13691 gimple_seq initlist = NULL;
13692 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
13693 TREE_VEC_ELT (t, i)),
13694 &initlist, true, NULL_TREE);
13695 gimple_seq_add_seq (&ilist, initlist);
13697 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
13698 gimple_seq_add_stmt (&olist,
13699 gimple_build_assign (TREE_VEC_ELT (t, i),
13700 clobber));
13702 else if (omp_maybe_offloaded_ctx (ctx->outer))
13704 tree id = get_identifier ("omp declare target");
13705 tree decl = TREE_VEC_ELT (t, i);
13706 DECL_ATTRIBUTES (decl)
13707 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13708 varpool_node *node = varpool_node::get (decl);
13709 if (node)
13711 node->offloadable = 1;
13712 if (ENABLE_OFFLOADING)
13714 g->have_offload = true;
13715 vec_safe_push (offload_vars, t);
13720 tree clobber = build_clobber (ctx->record_type);
13721 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13722 clobber));
13725 /* Once all the expansions are done, sequence all the different
13726 fragments inside gimple_omp_body. */
13728 new_body = NULL;
13730 if (offloaded
13731 && ctx->record_type)
13733 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13734 /* fixup_child_record_type might have changed receiver_decl's type. */
13735 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13736 gimple_seq_add_stmt (&new_body,
13737 gimple_build_assign (ctx->receiver_decl, t));
13739 gimple_seq_add_seq (&new_body, fplist);
13741 if (offloaded || data_region)
13743 tree prev = NULL_TREE;
13744 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13745 switch (OMP_CLAUSE_CODE (c))
13747 tree var, x;
13748 default:
13749 break;
13750 case OMP_CLAUSE_FIRSTPRIVATE:
13751 omp_firstprivatize_data_region:
13752 if (is_gimple_omp_oacc (ctx->stmt))
13753 break;
13754 var = OMP_CLAUSE_DECL (c);
13755 if (omp_privatize_by_reference (var)
13756 || is_gimple_reg_type (TREE_TYPE (var)))
13758 tree new_var = lookup_decl (var, ctx);
13759 tree type;
13760 type = TREE_TYPE (var);
13761 if (omp_privatize_by_reference (var))
13762 type = TREE_TYPE (type);
13763 if ((INTEGRAL_TYPE_P (type)
13764 && TYPE_PRECISION (type) <= POINTER_SIZE)
13765 || TREE_CODE (type) == POINTER_TYPE)
13767 x = build_receiver_ref (var, false, ctx);
13768 if (TREE_CODE (type) != POINTER_TYPE)
13769 x = fold_convert (pointer_sized_int_node, x);
13770 x = fold_convert (type, x);
13771 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13772 fb_rvalue);
13773 if (omp_privatize_by_reference (var))
13775 tree v = create_tmp_var_raw (type, get_name (var));
13776 gimple_add_tmp_var (v);
13777 TREE_ADDRESSABLE (v) = 1;
13778 gimple_seq_add_stmt (&new_body,
13779 gimple_build_assign (v, x));
13780 x = build_fold_addr_expr (v);
13782 gimple_seq_add_stmt (&new_body,
13783 gimple_build_assign (new_var, x));
13785 else
13787 bool by_ref = !omp_privatize_by_reference (var);
13788 x = build_receiver_ref (var, by_ref, ctx);
13789 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13790 fb_rvalue);
13791 gimple_seq_add_stmt (&new_body,
13792 gimple_build_assign (new_var, x));
13795 else if (is_variable_sized (var))
13797 tree pvar = DECL_VALUE_EXPR (var);
13798 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13799 pvar = TREE_OPERAND (pvar, 0);
13800 gcc_assert (DECL_P (pvar));
13801 tree new_var = lookup_decl (pvar, ctx);
13802 x = build_receiver_ref (var, false, ctx);
13803 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13804 gimple_seq_add_stmt (&new_body,
13805 gimple_build_assign (new_var, x));
13807 break;
13808 case OMP_CLAUSE_PRIVATE:
13809 if (is_gimple_omp_oacc (ctx->stmt))
13810 break;
13811 var = OMP_CLAUSE_DECL (c);
13812 if (omp_privatize_by_reference (var))
13814 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13815 tree new_var = lookup_decl (var, ctx);
13816 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13817 if (TREE_CONSTANT (x))
13819 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13820 get_name (var));
13821 gimple_add_tmp_var (x);
13822 TREE_ADDRESSABLE (x) = 1;
13823 x = build_fold_addr_expr_loc (clause_loc, x);
13825 else
13826 break;
13828 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13829 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13830 gimple_seq_add_stmt (&new_body,
13831 gimple_build_assign (new_var, x));
13833 break;
13834 case OMP_CLAUSE_USE_DEVICE_PTR:
13835 case OMP_CLAUSE_USE_DEVICE_ADDR:
13836 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13837 case OMP_CLAUSE_IS_DEVICE_PTR:
13838 tree new_var;
13839 gimple_seq assign_body;
13840 bool is_array_data;
13841 bool do_optional_check;
13842 assign_body = NULL;
13843 do_optional_check = false;
13844 var = OMP_CLAUSE_DECL (c);
13845 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13846 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR && is_array_data)
13847 goto omp_firstprivatize_data_region;
13849 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13850 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13851 x = build_sender_ref (is_array_data
13852 ? (splay_tree_key) &DECL_NAME (var)
13853 : (splay_tree_key) &DECL_UID (var), ctx);
13854 else
13856 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13858 while (TREE_CODE (var) == INDIRECT_REF
13859 || TREE_CODE (var) == ARRAY_REF)
13860 var = TREE_OPERAND (var, 0);
13862 x = build_receiver_ref (var, false, ctx);
13865 if (is_array_data)
13867 bool is_ref = omp_privatize_by_reference (var);
13868 do_optional_check = true;
13869 /* First, we copy the descriptor data from the host; then
13870 we update its data to point to the target address. */
13871 new_var = lookup_decl (var, ctx);
13872 new_var = DECL_VALUE_EXPR (new_var);
13873 tree v = new_var;
13874 tree v2 = var;
13875 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR
13876 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR)
13877 v2 = maybe_lookup_decl_in_outer_ctx (var, ctx);
13879 if (is_ref)
13881 v2 = build_fold_indirect_ref (v2);
13882 v = create_tmp_var_raw (TREE_TYPE (v2), get_name (var));
13883 gimple_add_tmp_var (v);
13884 TREE_ADDRESSABLE (v) = 1;
13885 gimplify_assign (v, v2, &assign_body);
13886 tree rhs = build_fold_addr_expr (v);
13887 gimple_seq_add_stmt (&assign_body,
13888 gimple_build_assign (new_var, rhs));
13890 else
13891 gimplify_assign (new_var, v2, &assign_body);
13893 v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13894 gcc_assert (v2);
13895 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13896 gimple_seq_add_stmt (&assign_body,
13897 gimple_build_assign (v2, x));
13899 else if (is_variable_sized (var))
13901 tree pvar = DECL_VALUE_EXPR (var);
13902 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13903 pvar = TREE_OPERAND (pvar, 0);
13904 gcc_assert (DECL_P (pvar));
13905 new_var = lookup_decl (pvar, ctx);
13906 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13907 gimple_seq_add_stmt (&assign_body,
13908 gimple_build_assign (new_var, x));
13910 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13911 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13912 && !omp_privatize_by_reference (var)
13913 && !omp_is_allocatable_or_ptr (var))
13914 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13916 new_var = lookup_decl (var, ctx);
13917 new_var = DECL_VALUE_EXPR (new_var);
13918 gcc_assert (TREE_CODE (new_var) == MEM_REF);
13919 new_var = TREE_OPERAND (new_var, 0);
13920 gcc_assert (DECL_P (new_var));
13921 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13922 gimple_seq_add_stmt (&assign_body,
13923 gimple_build_assign (new_var, x));
13925 else
13927 tree type = TREE_TYPE (var);
13928 new_var = lookup_decl (var, ctx);
13929 if (omp_privatize_by_reference (var))
13931 type = TREE_TYPE (type);
13932 if (POINTER_TYPE_P (type)
13933 && TREE_CODE (type) != ARRAY_TYPE
13934 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13935 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13936 || (omp_privatize_by_reference (var)
13937 && omp_is_allocatable_or_ptr (var))))
13939 tree v = create_tmp_var_raw (type, get_name (var));
13940 gimple_add_tmp_var (v);
13941 TREE_ADDRESSABLE (v) = 1;
13942 x = fold_convert (type, x);
13943 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13944 fb_rvalue);
13945 gimple_seq_add_stmt (&assign_body,
13946 gimple_build_assign (v, x));
13947 x = build_fold_addr_expr (v);
13948 do_optional_check = true;
13951 new_var = DECL_VALUE_EXPR (new_var);
13952 x = fold_convert (TREE_TYPE (new_var), x);
13953 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13954 gimple_seq_add_stmt (&assign_body,
13955 gimple_build_assign (new_var, x));
13957 tree present;
13958 present = ((do_optional_check
13959 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13960 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13961 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
13962 : NULL_TREE);
13963 if (present)
13965 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13966 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13967 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13968 glabel *null_glabel = gimple_build_label (null_label);
13969 glabel *notnull_glabel = gimple_build_label (notnull_label);
13970 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
13971 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13972 fb_rvalue);
13973 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13974 fb_rvalue);
13975 gcond *cond = gimple_build_cond_from_tree (present,
13976 notnull_label,
13977 null_label);
13978 gimple_seq_add_stmt (&new_body, cond);
13979 gimple_seq_add_stmt (&new_body, null_glabel);
13980 gimplify_assign (new_var, null_pointer_node, &new_body);
13981 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13982 gimple_seq_add_stmt (&new_body, notnull_glabel);
13983 gimple_seq_add_seq (&new_body, assign_body);
13984 gimple_seq_add_stmt (&new_body,
13985 gimple_build_label (opt_arg_label));
13987 else
13988 gimple_seq_add_seq (&new_body, assign_body);
13989 break;
13991 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13992 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13993 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13994 or references to VLAs. */
13995 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13996 switch (OMP_CLAUSE_CODE (c))
13998 tree var;
13999 default:
14000 break;
14001 case OMP_CLAUSE_MAP:
14002 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
14003 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
14005 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14006 poly_int64 offset = 0;
14007 gcc_assert (prev);
14008 var = OMP_CLAUSE_DECL (c);
14009 if (DECL_P (var)
14010 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
14011 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
14012 ctx))
14013 && varpool_node::get_create (var)->offloadable)
14014 break;
14015 if (TREE_CODE (var) == INDIRECT_REF
14016 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
14017 var = TREE_OPERAND (var, 0);
14018 if (TREE_CODE (var) == COMPONENT_REF)
14020 var = get_addr_base_and_unit_offset (var, &offset);
14021 gcc_assert (var != NULL_TREE && DECL_P (var));
14023 else if (DECL_SIZE (var)
14024 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
14026 tree var2 = DECL_VALUE_EXPR (var);
14027 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
14028 var2 = TREE_OPERAND (var2, 0);
14029 gcc_assert (DECL_P (var2));
14030 var = var2;
14032 tree new_var = lookup_decl (var, ctx), x;
14033 tree type = TREE_TYPE (new_var);
14034 bool is_ref;
14035 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
14036 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
14037 == COMPONENT_REF))
14039 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
14040 is_ref = true;
14041 new_var = build2 (MEM_REF, type,
14042 build_fold_addr_expr (new_var),
14043 build_int_cst (build_pointer_type (type),
14044 offset));
14046 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
14048 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
14049 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
14050 new_var = build2 (MEM_REF, type,
14051 build_fold_addr_expr (new_var),
14052 build_int_cst (build_pointer_type (type),
14053 offset));
14055 else
14056 is_ref = omp_privatize_by_reference (var);
14057 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
14058 is_ref = false;
14059 bool ref_to_array = false;
14060 bool ref_to_ptr = false;
14061 if (is_ref)
14063 type = TREE_TYPE (type);
14064 if (TREE_CODE (type) == ARRAY_TYPE)
14066 type = build_pointer_type (type);
14067 ref_to_array = true;
14070 else if (TREE_CODE (type) == ARRAY_TYPE)
14072 tree decl2 = DECL_VALUE_EXPR (new_var);
14073 gcc_assert (TREE_CODE (decl2) == MEM_REF);
14074 decl2 = TREE_OPERAND (decl2, 0);
14075 gcc_assert (DECL_P (decl2));
14076 new_var = decl2;
14077 type = TREE_TYPE (new_var);
14079 else if (TREE_CODE (type) == REFERENCE_TYPE
14080 && TREE_CODE (TREE_TYPE (type)) == POINTER_TYPE)
14082 type = TREE_TYPE (type);
14083 ref_to_ptr = true;
14085 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
14086 x = fold_convert_loc (clause_loc, type, x);
14087 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
14089 tree bias = OMP_CLAUSE_SIZE (c);
14090 if (DECL_P (bias))
14091 bias = lookup_decl (bias, ctx);
14092 bias = fold_convert_loc (clause_loc, sizetype, bias);
14093 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
14094 bias);
14095 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
14096 TREE_TYPE (x), x, bias);
14098 if (ref_to_array)
14099 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
14100 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14101 if ((is_ref && !ref_to_array)
14102 || ref_to_ptr)
14104 tree t = create_tmp_var_raw (type, get_name (var));
14105 gimple_add_tmp_var (t);
14106 TREE_ADDRESSABLE (t) = 1;
14107 gimple_seq_add_stmt (&new_body,
14108 gimple_build_assign (t, x));
14109 x = build_fold_addr_expr_loc (clause_loc, t);
14111 gimple_seq_add_stmt (&new_body,
14112 gimple_build_assign (new_var, x));
14113 prev = NULL_TREE;
14115 else if (OMP_CLAUSE_CHAIN (c)
14116 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
14117 == OMP_CLAUSE_MAP
14118 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
14119 == GOMP_MAP_FIRSTPRIVATE_POINTER
14120 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
14121 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
14122 prev = c;
14123 break;
14124 case OMP_CLAUSE_PRIVATE:
14125 var = OMP_CLAUSE_DECL (c);
14126 if (is_variable_sized (var))
14128 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14129 tree new_var = lookup_decl (var, ctx);
14130 tree pvar = DECL_VALUE_EXPR (var);
14131 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
14132 pvar = TREE_OPERAND (pvar, 0);
14133 gcc_assert (DECL_P (pvar));
14134 tree new_pvar = lookup_decl (pvar, ctx);
14135 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
14136 tree al = size_int (DECL_ALIGN (var));
14137 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
14138 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
14139 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
14140 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14141 gimple_seq_add_stmt (&new_body,
14142 gimple_build_assign (new_pvar, x));
14144 else if (omp_privatize_by_reference (var)
14145 && !is_gimple_omp_oacc (ctx->stmt))
14147 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14148 tree new_var = lookup_decl (var, ctx);
14149 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
14150 if (TREE_CONSTANT (x))
14151 break;
14152 else
14154 tree atmp
14155 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
14156 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
14157 tree al = size_int (TYPE_ALIGN (rtype));
14158 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
14161 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
14162 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14163 gimple_seq_add_stmt (&new_body,
14164 gimple_build_assign (new_var, x));
14166 break;
14169 gimple_seq fork_seq = NULL;
14170 gimple_seq join_seq = NULL;
14172 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
14174 /* If there are reductions on the offloaded region itself, treat
14175 them as a dummy GANG loop. */
14176 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
14178 gcall *private_marker = lower_oacc_private_marker (ctx);
14180 if (private_marker)
14181 gimple_call_set_arg (private_marker, 2, level);
14183 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
14184 false, NULL, private_marker, NULL, &fork_seq,
14185 &join_seq, ctx);
14188 gimple_seq_add_seq (&new_body, fork_seq);
14189 gimple_seq_add_seq (&new_body, tgt_body);
14190 gimple_seq_add_seq (&new_body, join_seq);
14192 if (offloaded)
14194 new_body = maybe_catch_exception (new_body);
14195 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
14197 gimple_omp_set_body (stmt, new_body);
14200 bind = gimple_build_bind (NULL, NULL,
14201 tgt_bind ? gimple_bind_block (tgt_bind)
14202 : NULL_TREE);
14203 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
14204 gimple_bind_add_seq (bind, ilist);
14205 gimple_bind_add_stmt (bind, stmt);
14206 gimple_bind_add_seq (bind, olist);
14208 pop_gimplify_context (NULL);
14210 if (dep_bind)
14212 gimple_bind_add_seq (dep_bind, dep_ilist);
14213 gimple_bind_add_stmt (dep_bind, bind);
14214 gimple_bind_add_seq (dep_bind, dep_olist);
14215 pop_gimplify_context (dep_bind);
/* Expand code for an OpenMP teams directive.  Emits a retry loop around a
   GOMP_teams4 call: the runtime call is re-issued (with FIRST cleared) and
   the lowered teams body re-executed until the runtime reports no more
   league iterations are needed.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  /* Gimplify the num_teams clause (lower and upper bounds) into
     unsigned values; 0 means "no clause / runtime default".  */
  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  tree num_teams_lower = NULL_TREE;
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams_lower = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams);
      if (num_teams_lower)
	{
	  num_teams_lower = fold_convert (unsigned_type_node, num_teams_lower);
	  gimplify_expr (&num_teams_lower, &bind_body, NULL, is_gimple_val,
			 fb_rvalue);
	}
      num_teams = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* Without an explicit lower bound, the lower bound equals the upper.  */
  if (num_teams_lower == NULL_TREE)
    num_teams_lower = num_teams;
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }
  /* Build:  first = 1;
	     llabel:
	     temp = GOMP_teams4 (num_teams_lower, num_teams, thread_limit,
				 first);
	     if (temp != 0) goto tlabel; else goto flabel;
	     tlabel: first = 0;  <teams body>;  goto llabel;
	     flabel:  */
  location_t loc = gimple_location (teams_stmt);
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4);
  tree rettype = TREE_TYPE (TREE_TYPE (decl));
  tree first = create_tmp_var (rettype);
  gimple_seq_add_stmt (&bind_body,
		       gimple_build_assign (first, build_one_cst (rettype)));
  tree llabel = create_artificial_label (loc);
  gimple_seq_add_stmt (&bind_body, gimple_build_label (llabel));
  gimple *call
    = gimple_build_call (decl, 4, num_teams_lower, num_teams, thread_limit,
			 first);
  gimple_set_location (call, loc);
  tree temp = create_tmp_var (rettype);
  gimple_call_set_lhs (call, temp);
  gimple_seq_add_stmt (&bind_body, call);

  tree tlabel = create_artificial_label (loc);
  tree flabel = create_artificial_label (loc);
  gimple *cond = gimple_build_cond (NE_EXPR, temp, build_zero_cst (rettype),
				    tlabel, flabel);
  gimple_seq_add_stmt (&bind_body, cond);
  gimple_seq_add_stmt (&bind_body, gimple_build_label (tlabel));
  gimple_seq_add_stmt (&bind_body,
		       gimple_build_assign (first, build_zero_cst (rettype)));

  /* Lower data-sharing and reduction clauses, then splice in the lowered
     teams body followed by the reduction (olist) and destructor (dlist)
     sequences.  */
  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
			   NULL, ctx);
  gimple_seq_add_stmt (&bind_body, teams_stmt);

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_seq_add_stmt (&bind_body, gimple_build_goto (llabel));
  gimple_seq_add_stmt (&bind_body, gimple_build_label (flabel));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with make_addressable_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.
     (Only checked inside an OMP context, i.e. DATA == NULL.)  */
  if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
      && data == NULL
      && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  /* Decls recorded in make_addressable_vars also force regimplification.  */
  if (make_addressable_vars
      && DECL_P (t)
      && bitmap_bit_p (make_addressable_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  /* Don't descend into types or other decls.  */
  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  /* Context in which remapped decls are looked up.  */
  omp_context *ctx;
  /* Flat list of (saved DECL_VALUE_EXPR, decl) pairs to restore after
     regimplification; pushed in that order, popped in reverse.  */
  vec<tree> *decls;
};
/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  /* Remember the original DECL_VALUE_EXPR (and the decl itself) so
	     the caller can restore it, then install a copy with T remapped
	     to its privatized counterpart O.  */
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}
14380 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
14381 of omp_member_access_dummy_var vars during regimplification. */
14383 static void
14384 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
14385 gimple_stmt_iterator *gsi_p)
14387 auto_vec<tree, 10> decls;
14388 if (ctx)
14390 struct walk_stmt_info wi;
14391 memset (&wi, '\0', sizeof (wi));
14392 struct lower_omp_regimplify_operands_data data;
14393 data.ctx = ctx;
14394 data.decls = &decls;
14395 wi.info = &data;
14396 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
14398 gimple_regimplify_operands (stmt, gsi_p);
14399 while (!decls.is_empty ())
14401 tree t = decls.pop ();
14402 tree v = decls.pop ();
14403 SET_DECL_VALUE_EXPR (t, v);
/* Lower the statement at *GSI_P.  CTX is the innermost enclosing OMP
   context, or NULL when outside any OMP region (in which case only
   make_addressable_vars processing applies).  Dispatches OMP directives
   to their construct-specific lowering routines, recurses into compound
   statements, and regimplifies operands that privatization may have
   invalidated.  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* WI is only consulted by lower_omp_regimplify_p when CTX is NULL.  */
  if (make_addressable_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || make_addressable_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_ASSUME:
      lower_omp (gimple_assume_body_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      if (ctx && is_gimple_omp_oacc (ctx->stmt))
	{
	  /* Record candidate decls for OpenACC privatization before
	     recursing into the bind body.  */
	  tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
	  oacc_privatization_scan_decl_chain (ctx, vars);
	}
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCOPE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scope (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || make_addressable_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* Host teams constructs are lowered like parallel/task regions.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* Region is not cancellable: a cancellation point is a
		   no-op; barrier/cancel calls are left untouched.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		/* In a cancellable region a barrier must also observe
		   pending cancellation.  */
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    /* Give the call a boolean result and branch to the region's
	       cancel label when it returns true.  */
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      goto regimplify;

    case GIMPLE_ASSIGN:
      /* For stores inside constructs with conditional lastprivate, also
	 record into the _condtemp_ iteration variable of each relevant
	 enclosing worksharing context.  */
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen1)
		  {
		    up = up->outer;
		    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
		      up = up->outer;
		  }
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || make_addressable_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
14671 static void
14672 lower_omp (gimple_seq *body, omp_context *ctx)
14674 location_t saved_location = input_location;
14675 gimple_stmt_iterator gsi;
14676 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14677 lower_omp_1 (&gsi, ctx);
14678 /* During gimplification, we haven't folded statments inside offloading
14679 or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now. */
14680 if (target_nesting_level || taskreg_nesting_level)
14681 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14682 fold_stmt (&gsi);
14683 input_location = saved_location;
/* Main entry point.  Scans the whole function body for OMP contexts,
   then lowers each region, and finally releases all pass-global state.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* Phase 1: build the omp_context tree for all OMP regions.  */
  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  /* Phase 2: lower the regions, if any contexts were recorded.  */
  if (all_contexts->root)
    {
      if (make_addressable_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (make_addressable_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (make_addressable_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));

  /* Finalize copy functions of tasks recorded during lowering.  */
  for (auto task_stmt : task_cpyfns)
    finalize_task_copyfn (task_stmt);
  task_cpyfns.release ();
  return 0;
}
namespace {

/* Pass descriptor for the "omplower" GIMPLE pass.  */
const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  /* No gate: the pass always executes so PROP_gimple_lomp is provided;
     execute_lower_omp itself returns early when OMP is disabled.  */
  unsigned int execute (function *) final override
  {
    return execute_lower_omp ();
  }

}; // class pass_lower_omp

} // anon namespace
/* Factory for the "omplower" pass, referenced from the pass manager.  */

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

/* Map from LABEL_DECL to the innermost OMP construct containing it,
   populated by diagnose_sb_1 and consumed by diagnose_sb_2.  */
static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  BRANCH_CTX/LABEL_CTX are the innermost
   OMP constructs enclosing the branch and its destination label (NULL
   when outside any construct).  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  /* Identical contexts means the branch stays inside one structured
     block — always valid.  */
  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing and error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  /* Drop the offending branch so later passes see well-formed IL.  */
  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  WI->info carries the innermost enclosing
   OMP construct (NULL at top level).  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      /* Record which construct this label lives in, for pass 2.  */
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  Uses the label->context map built by
   diagnose_sb_1; offending branches are diagnosed and removed by
   diagnose_sb_0.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	/* Check both branch destinations of the conditional.  */
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	/* Computed gotos cannot be checked against a label context.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    /* Stop after the first diagnosed case label: diagnose_sb_0
	       has already replaced the whole switch with a nop.  */
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      /* A return from inside any OMP construct is invalid; the label
	 context is the function scope (NULL).  */
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
15033 static unsigned int
15034 diagnose_omp_structured_block_errors (void)
15036 struct walk_stmt_info wi;
15037 gimple_seq body = gimple_body (current_function_decl);
15039 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
15041 memset (&wi, 0, sizeof (wi));
15042 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
15044 memset (&wi, 0, sizeof (wi));
15045 wi.want_locations = true;
15046 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
15048 gimple_set_body (current_function_decl, body);
15050 splay_tree_delete (all_labels);
15051 all_labels = NULL;
15053 return 0;
namespace {

/* Pass descriptor for the structured-block diagnostic pass (name starts
   with '*' so it is not dumpable on its own).  */
const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  /* Only worth running when some OpenACC/OpenMP mode is enabled.  */
  bool gate (function *) final override
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  unsigned int execute (function *) final override
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace
/* Factory for the structured-block diagnostic pass, referenced from the
   pass manager.  */

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}
15099 #include "gt-omp-low.h"