Add assember CFI directives to millicode division and remainder routines.
[official-gcc.git] / gcc / omp-low.cc
blob1818132830ff712316fc8abcd4df54585570f110
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2023 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
42 #include "gimplify.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "omp-offload.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
78 struct omp_context
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.cc (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
84 copy_body_data cb;
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context *outer;
88 gimple *stmt;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
92 splay_tree field_map;
93 tree record_type;
94 tree sender_decl;
95 tree receiver_decl;
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map;
103 tree srecord_type;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
107 tree block_vars;
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
111 tree cancel_label;
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
114 otherwise. */
115 gimple *simt_stmt;
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec<tree> task_reductions;
122 /* A hash map from the reduction clauses to the registered array
123 elts. */
124 hash_map<tree, unsigned> *task_reduction_map;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map<tree, tree> *lastprivate_conditional_map;
130 /* And a hash map from the allocate variables to their corresponding
131 allocators. */
132 hash_map<tree, tree> *allocate_map;
134 /* A tree_list of the reduction clauses in this context. This is
135 only used for checking the consistency of OpenACC reduction
136 clauses in scan_omp_for and is not guaranteed to contain a valid
137 value outside of this function. */
138 tree local_reduction_clauses;
140 /* A tree_list of the reduction clauses in outer contexts. This is
141 only used for checking the consistency of OpenACC reduction
142 clauses in scan_omp_for and is not guaranteed to contain a valid
143 value outside of this function. */
144 tree outer_reduction_clauses;
146 /* Nesting depth of this context. Used to beautify error messages re
147 invalid gotos. The outermost ctx is depth 1, with depth 0 being
148 reserved for the main body of the function. */
149 int depth;
151 /* True if this parallel directive is nested within another. */
152 bool is_nested;
154 /* True if this construct can be cancelled. */
155 bool cancellable;
157 /* True if lower_omp_1 should look up lastprivate conditional in parent
158 context. */
159 bool combined_into_simd_safelen1;
161 /* True if there is nested scan context with inclusive clause. */
162 bool scan_inclusive;
164 /* True if there is nested scan context with exclusive clause. */
165 bool scan_exclusive;
167 /* True in the second simd loop of for simd with inscan reductions. */
168 bool for_simd_scan_phase;
170 /* True if there is order(concurrent) clause on the construct. */
171 bool order_concurrent;
173 /* True if there is bind clause on the construct (i.e. a loop construct). */
174 bool loop_p;
176 /* Only used for omp target contexts. True if a teams construct is
177 strictly nested in it. */
178 bool teams_nested_p;
180 /* Only used for omp target contexts. True if an OpenMP construct other
181 than teams is strictly nested in it. */
182 bool nonteams_nested_p;
184 /* Candidates for adjusting OpenACC privatization level. */
185 vec<tree> oacc_privatization_candidates;
188 static splay_tree all_contexts;
189 static int taskreg_nesting_level;
190 static int target_nesting_level;
191 static bitmap make_addressable_vars;
192 static bitmap global_nonaddressable_vars;
193 static vec<omp_context *> taskreg_contexts;
194 static vec<gomp_task *> task_cpyfns;
196 static void scan_omp (gimple_seq *, omp_context *);
197 static tree scan_omp_1_op (tree *, int *, void *);
198 static bool omp_maybe_offloaded_ctx (omp_context *ctx);
200 #define WALK_SUBSTMTS \
201 case GIMPLE_BIND: \
202 case GIMPLE_TRY: \
203 case GIMPLE_CATCH: \
204 case GIMPLE_EH_FILTER: \
205 case GIMPLE_ASSUME: \
206 case GIMPLE_TRANSACTION: \
207 /* The sub-statements for these should be walked. */ \
208 *handled_ops_p = false; \
209 break;
211 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
212 (This doesn't include OpenACC 'kernels' decomposed parts.) */
214 static bool
215 is_oacc_parallel_or_serial (omp_context *ctx)
217 enum gimple_code outer_type = gimple_code (ctx->stmt);
218 return ((outer_type == GIMPLE_OMP_TARGET)
219 && ((gimple_omp_target_kind (ctx->stmt)
220 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
221 || (gimple_omp_target_kind (ctx->stmt)
222 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
225 /* Return whether CTX represents an OpenACC 'kernels' construct.
226 (This doesn't include OpenACC 'kernels' decomposed parts.) */
228 static bool
229 is_oacc_kernels (omp_context *ctx)
231 enum gimple_code outer_type = gimple_code (ctx->stmt);
232 return ((outer_type == GIMPLE_OMP_TARGET)
233 && (gimple_omp_target_kind (ctx->stmt)
234 == GF_OMP_TARGET_KIND_OACC_KERNELS));
237 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
239 static bool
240 is_oacc_kernels_decomposed_part (omp_context *ctx)
242 enum gimple_code outer_type = gimple_code (ctx->stmt);
243 return ((outer_type == GIMPLE_OMP_TARGET)
244 && ((gimple_omp_target_kind (ctx->stmt)
245 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
246 || (gimple_omp_target_kind (ctx->stmt)
247 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
248 || (gimple_omp_target_kind (ctx->stmt)
249 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
252 /* Return true if STMT corresponds to an OpenMP target region. */
253 static bool
254 is_omp_target (gimple *stmt)
256 if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
258 int kind = gimple_omp_target_kind (stmt);
259 return (kind == GF_OMP_TARGET_KIND_REGION
260 || kind == GF_OMP_TARGET_KIND_DATA
261 || kind == GF_OMP_TARGET_KIND_ENTER_DATA
262 || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
264 return false;
267 /* If DECL is the artificial dummy VAR_DECL created for non-static
268 data member privatization, return the underlying "this" parameter,
269 otherwise return NULL. */
271 tree
272 omp_member_access_dummy_var (tree decl)
274 if (!VAR_P (decl)
275 || !DECL_ARTIFICIAL (decl)
276 || !DECL_IGNORED_P (decl)
277 || !DECL_HAS_VALUE_EXPR_P (decl)
278 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
279 return NULL_TREE;
281 tree v = DECL_VALUE_EXPR (decl);
282 if (TREE_CODE (v) != COMPONENT_REF)
283 return NULL_TREE;
285 while (1)
286 switch (TREE_CODE (v))
288 case COMPONENT_REF:
289 case MEM_REF:
290 case INDIRECT_REF:
291 CASE_CONVERT:
292 case POINTER_PLUS_EXPR:
293 v = TREE_OPERAND (v, 0);
294 continue;
295 case PARM_DECL:
296 if (DECL_CONTEXT (v) == current_function_decl
297 && DECL_ARTIFICIAL (v)
298 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
299 return v;
300 return NULL_TREE;
301 default:
302 return NULL_TREE;
306 /* Helper for unshare_and_remap, called through walk_tree. */
308 static tree
309 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
311 tree *pair = (tree *) data;
312 if (*tp == pair[0])
314 *tp = unshare_expr (pair[1]);
315 *walk_subtrees = 0;
317 else if (IS_TYPE_OR_DECL_P (*tp))
318 *walk_subtrees = 0;
319 return NULL_TREE;
322 /* Return unshare_expr (X) with all occurrences of FROM
323 replaced with TO. */
325 static tree
326 unshare_and_remap (tree x, tree from, tree to)
328 tree pair[2] = { from, to };
329 x = unshare_expr (x);
330 walk_tree (&x, unshare_and_remap_1, pair, NULL);
331 return x;
334 /* Convenience function for calling scan_omp_1_op on tree operands. */
336 static inline tree
337 scan_omp_op (tree *tp, omp_context *ctx)
339 struct walk_stmt_info wi;
341 memset (&wi, 0, sizeof (wi));
342 wi.info = ctx;
343 wi.want_locations = true;
345 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
348 static void lower_omp (gimple_seq *, omp_context *);
349 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
350 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
352 /* Return true if CTX is for an omp parallel. */
354 static inline bool
355 is_parallel_ctx (omp_context *ctx)
357 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
361 /* Return true if CTX is for an omp task. */
363 static inline bool
364 is_task_ctx (omp_context *ctx)
366 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
370 /* Return true if CTX is for an omp taskloop. */
372 static inline bool
373 is_taskloop_ctx (omp_context *ctx)
375 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
376 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
380 /* Return true if CTX is for a host omp teams. */
382 static inline bool
383 is_host_teams_ctx (omp_context *ctx)
385 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
386 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
389 /* Return true if CTX is for an omp parallel or omp task or host omp teams
390 (the last one is strictly not a task region in OpenMP speak, but we
391 need to treat it similarly). */
393 static inline bool
394 is_taskreg_ctx (omp_context *ctx)
396 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
399 /* Return true if EXPR is variable sized. */
401 static inline bool
402 is_variable_sized (const_tree expr)
404 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
407 /* Lookup variables. The "maybe" form
408 allows for the variable form to not have been entered, otherwise we
409 assert that the variable must have been entered. */
411 static inline tree
412 lookup_decl (tree var, omp_context *ctx)
414 tree *n = ctx->cb.decl_map->get (var);
415 return *n;
418 static inline tree
419 maybe_lookup_decl (const_tree var, omp_context *ctx)
421 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
422 return n ? *n : NULL_TREE;
425 static inline tree
426 lookup_field (tree var, omp_context *ctx)
428 splay_tree_node n;
429 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
430 return (tree) n->value;
433 static inline tree
434 lookup_sfield (splay_tree_key key, omp_context *ctx)
436 splay_tree_node n;
437 n = splay_tree_lookup (ctx->sfield_map
438 ? ctx->sfield_map : ctx->field_map, key);
439 return (tree) n->value;
442 static inline tree
443 lookup_sfield (tree var, omp_context *ctx)
445 return lookup_sfield ((splay_tree_key) var, ctx);
448 static inline tree
449 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
451 splay_tree_node n;
452 n = splay_tree_lookup (ctx->field_map, key);
453 return n ? (tree) n->value : NULL_TREE;
456 static inline tree
457 maybe_lookup_field (tree var, omp_context *ctx)
459 return maybe_lookup_field ((splay_tree_key) var, ctx);
462 /* Return true if DECL should be copied by pointer. SHARED_CTX is
463 the parallel context if DECL is to be shared. */
465 static bool
466 use_pointer_for_field (tree decl, omp_context *shared_ctx)
468 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
469 || TYPE_ATOMIC (TREE_TYPE (decl)))
470 return true;
472 /* We can only use copy-in/copy-out semantics for shared variables
473 when we know the value is not accessible from an outer scope. */
474 if (shared_ctx)
476 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
478 /* ??? Trivially accessible from anywhere. But why would we even
479 be passing an address in this case? Should we simply assert
480 this to be false, or should we have a cleanup pass that removes
481 these from the list of mappings? */
482 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
483 return true;
485 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
486 without analyzing the expression whether or not its location
487 is accessible to anyone else. In the case of nested parallel
488 regions it certainly may be. */
489 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
490 return true;
492 /* Do not use copy-in/copy-out for variables that have their
493 address taken. */
494 if (is_global_var (decl))
496 /* For file scope vars, track whether we've seen them as
497 non-addressable initially and in that case, keep the same
498 answer for the duration of the pass, even when they are made
499 addressable later on e.g. through reduction expansion. Global
500 variables which weren't addressable before the pass will not
501 have their privatized copies address taken. See PR91216. */
502 if (!TREE_ADDRESSABLE (decl))
504 if (!global_nonaddressable_vars)
505 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
506 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
508 else if (!global_nonaddressable_vars
509 || !bitmap_bit_p (global_nonaddressable_vars,
510 DECL_UID (decl)))
511 return true;
513 else if (TREE_ADDRESSABLE (decl))
514 return true;
516 /* lower_send_shared_vars only uses copy-in, but not copy-out
517 for these. */
518 if (TREE_READONLY (decl)
519 || ((TREE_CODE (decl) == RESULT_DECL
520 || TREE_CODE (decl) == PARM_DECL)
521 && DECL_BY_REFERENCE (decl)))
522 return false;
524 /* Disallow copy-in/out in nested parallel if
525 decl is shared in outer parallel, otherwise
526 each thread could store the shared variable
527 in its own copy-in location, making the
528 variable no longer really shared. */
529 if (shared_ctx->is_nested)
531 omp_context *up;
533 for (up = shared_ctx->outer; up; up = up->outer)
534 if ((is_taskreg_ctx (up)
535 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
536 && is_gimple_omp_offloaded (up->stmt)))
537 && maybe_lookup_decl (decl, up))
538 break;
540 if (up)
542 tree c;
544 if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
546 for (c = gimple_omp_target_clauses (up->stmt);
547 c; c = OMP_CLAUSE_CHAIN (c))
548 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
549 && OMP_CLAUSE_DECL (c) == decl)
550 break;
552 else
553 for (c = gimple_omp_taskreg_clauses (up->stmt);
554 c; c = OMP_CLAUSE_CHAIN (c))
555 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
556 && OMP_CLAUSE_DECL (c) == decl)
557 break;
559 if (c)
560 goto maybe_mark_addressable_and_ret;
564 /* For tasks avoid using copy-in/out. As tasks can be
565 deferred or executed in different thread, when GOMP_task
566 returns, the task hasn't necessarily terminated. */
567 if (is_task_ctx (shared_ctx))
569 tree outer;
570 maybe_mark_addressable_and_ret:
571 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
572 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
574 /* Taking address of OUTER in lower_send_shared_vars
575 might need regimplification of everything that uses the
576 variable. */
577 if (!make_addressable_vars)
578 make_addressable_vars = BITMAP_ALLOC (NULL);
579 bitmap_set_bit (make_addressable_vars, DECL_UID (outer));
580 TREE_ADDRESSABLE (outer) = 1;
582 return true;
586 return false;
589 /* Construct a new automatic decl similar to VAR. */
591 static tree
592 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
594 tree copy = copy_var_decl (var, name, type);
596 DECL_CONTEXT (copy) = current_function_decl;
598 if (ctx)
600 DECL_CHAIN (copy) = ctx->block_vars;
601 ctx->block_vars = copy;
603 else
604 record_vars (copy);
606 /* If VAR is listed in make_addressable_vars, it wasn't
607 originally addressable, but was only later made so.
608 We don't need to take address of privatizations
609 from that var. */
610 if (TREE_ADDRESSABLE (var)
611 && ((make_addressable_vars
612 && bitmap_bit_p (make_addressable_vars, DECL_UID (var)))
613 || (global_nonaddressable_vars
614 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
615 TREE_ADDRESSABLE (copy) = 0;
617 return copy;
620 static tree
621 omp_copy_decl_1 (tree var, omp_context *ctx)
623 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
626 /* Build tree nodes to access the field for VAR on the receiver side. */
628 static tree
629 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
631 tree x, field = lookup_field (var, ctx);
633 /* If the receiver record type was remapped in the child function,
634 remap the field into the new record type. */
635 x = maybe_lookup_field (field, ctx);
636 if (x != NULL)
637 field = x;
639 x = build_simple_mem_ref (ctx->receiver_decl);
640 TREE_THIS_NOTRAP (x) = 1;
641 x = omp_build_component_ref (x, field);
642 if (by_ref)
644 x = build_simple_mem_ref (x);
645 TREE_THIS_NOTRAP (x) = 1;
648 return x;
651 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
652 of a parallel, this is a component reference; for workshare constructs
653 this is some variable. */
655 static tree
656 build_outer_var_ref (tree var, omp_context *ctx,
657 enum omp_clause_code code = OMP_CLAUSE_ERROR)
659 tree x;
660 omp_context *outer = ctx->outer;
661 for (; outer; outer = outer->outer)
663 if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
664 continue;
665 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
666 && !maybe_lookup_decl (var, outer))
667 continue;
668 break;
671 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
672 x = var;
673 else if (is_variable_sized (var))
675 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
676 x = build_outer_var_ref (x, ctx, code);
677 x = build_simple_mem_ref (x);
679 else if (is_taskreg_ctx (ctx))
681 bool by_ref = use_pointer_for_field (var, NULL);
682 x = build_receiver_ref (var, by_ref, ctx);
684 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
685 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
686 || ctx->loop_p
687 || code == OMP_CLAUSE_ALLOCATE
688 || (code == OMP_CLAUSE_PRIVATE
689 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
690 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
691 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
693 /* #pragma omp simd isn't a worksharing construct, and can reference
694 even private vars in its linear etc. clauses.
695 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
696 to private vars in all worksharing constructs. */
697 x = NULL_TREE;
698 if (outer && is_taskreg_ctx (outer))
699 x = lookup_decl (var, outer);
700 else if (outer)
701 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
702 if (x == NULL_TREE)
703 x = var;
705 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
707 gcc_assert (outer);
708 splay_tree_node n
709 = splay_tree_lookup (outer->field_map,
710 (splay_tree_key) &DECL_UID (var));
711 if (n == NULL)
713 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
714 x = var;
715 else
716 x = lookup_decl (var, outer);
718 else
720 tree field = (tree) n->value;
721 /* If the receiver record type was remapped in the child function,
722 remap the field into the new record type. */
723 x = maybe_lookup_field (field, outer);
724 if (x != NULL)
725 field = x;
727 x = build_simple_mem_ref (outer->receiver_decl);
728 x = omp_build_component_ref (x, field);
729 if (use_pointer_for_field (var, outer))
730 x = build_simple_mem_ref (x);
733 else if (outer)
734 x = lookup_decl (var, outer);
735 else if (omp_privatize_by_reference (var))
736 /* This can happen with orphaned constructs. If var is reference, it is
737 possible it is shared and as such valid. */
738 x = var;
739 else if (omp_member_access_dummy_var (var))
740 x = var;
741 else
742 gcc_unreachable ();
744 if (x == var)
746 tree t = omp_member_access_dummy_var (var);
747 if (t)
749 x = DECL_VALUE_EXPR (var);
750 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
751 if (o != t)
752 x = unshare_and_remap (x, t, o);
753 else
754 x = unshare_expr (x);
758 if (omp_privatize_by_reference (var))
759 x = build_simple_mem_ref (x);
761 return x;
764 /* Build tree nodes to access the field for VAR on the sender side. */
766 static tree
767 build_sender_ref (splay_tree_key key, omp_context *ctx)
769 tree field = lookup_sfield (key, ctx);
770 return omp_build_component_ref (ctx->sender_decl, field);
773 static tree
774 build_sender_ref (tree var, omp_context *ctx)
776 return build_sender_ref ((splay_tree_key) var, ctx);
779 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
780 BASE_POINTERS_RESTRICT, declare the field with restrict. */
782 static void
783 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
785 tree field, type, sfield = NULL_TREE;
786 splay_tree_key key = (splay_tree_key) var;
788 if ((mask & 16) != 0)
790 key = (splay_tree_key) &DECL_NAME (var);
791 gcc_checking_assert (key != (splay_tree_key) var);
793 if ((mask & 8) != 0)
795 key = (splay_tree_key) &DECL_UID (var);
796 gcc_checking_assert (key != (splay_tree_key) var);
798 gcc_assert ((mask & 1) == 0
799 || !splay_tree_lookup (ctx->field_map, key));
800 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
801 || !splay_tree_lookup (ctx->sfield_map, key));
802 gcc_assert ((mask & 3) == 3
803 || !is_gimple_omp_oacc (ctx->stmt));
805 type = TREE_TYPE (var);
806 if ((mask & 16) != 0)
807 type = lang_hooks.decls.omp_array_data (var, true);
809 /* Prevent redeclaring the var in the split-off function with a restrict
810 pointer type. Note that we only clear type itself, restrict qualifiers in
811 the pointed-to type will be ignored by points-to analysis. */
812 if (POINTER_TYPE_P (type)
813 && TYPE_RESTRICT (type))
814 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
816 if (mask & 4)
818 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
819 type = build_pointer_type (build_pointer_type (type));
821 else if (by_ref)
822 type = build_pointer_type (type);
823 else if ((mask & (32 | 3)) == 1
824 && omp_privatize_by_reference (var))
825 type = TREE_TYPE (type);
827 field = build_decl (DECL_SOURCE_LOCATION (var),
828 FIELD_DECL, DECL_NAME (var), type);
830 /* Remember what variable this field was created for. This does have a
831 side effect of making dwarf2out ignore this member, so for helpful
832 debugging we clear it later in delete_omp_context. */
833 DECL_ABSTRACT_ORIGIN (field) = var;
834 if ((mask & 16) == 0 && type == TREE_TYPE (var))
836 SET_DECL_ALIGN (field, DECL_ALIGN (var));
837 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
838 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
840 else
841 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
843 if ((mask & 3) == 3)
845 insert_field_into_struct (ctx->record_type, field);
846 if (ctx->srecord_type)
848 sfield = build_decl (DECL_SOURCE_LOCATION (var),
849 FIELD_DECL, DECL_NAME (var), type);
850 DECL_ABSTRACT_ORIGIN (sfield) = var;
851 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
852 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
853 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
854 insert_field_into_struct (ctx->srecord_type, sfield);
857 else
859 if (ctx->srecord_type == NULL_TREE)
861 tree t;
863 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
864 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
865 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
867 sfield = build_decl (DECL_SOURCE_LOCATION (t),
868 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
869 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
870 insert_field_into_struct (ctx->srecord_type, sfield);
871 splay_tree_insert (ctx->sfield_map,
872 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
873 (splay_tree_value) sfield);
876 sfield = field;
877 insert_field_into_struct ((mask & 1) ? ctx->record_type
878 : ctx->srecord_type, field);
881 if (mask & 1)
882 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
883 if ((mask & 2) && ctx->sfield_map)
884 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
887 static tree
888 install_var_local (tree var, omp_context *ctx)
890 tree new_var = omp_copy_decl_1 (var, ctx);
891 insert_decl_map (&ctx->cb, var, new_var);
892 return new_var;
895 /* Adjust the replacement for DECL in CTX for the new context. This means
896 copying the DECL_VALUE_EXPR, and fixing up the type. */
898 static void
899 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
901 tree new_decl, size;
903 new_decl = lookup_decl (decl, ctx);
905 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
907 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
908 && DECL_HAS_VALUE_EXPR_P (decl))
910 tree ve = DECL_VALUE_EXPR (decl);
911 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
912 SET_DECL_VALUE_EXPR (new_decl, ve);
913 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
916 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
918 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
919 if (size == error_mark_node)
920 size = TYPE_SIZE (TREE_TYPE (new_decl));
921 DECL_SIZE (new_decl) = size;
923 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
924 if (size == error_mark_node)
925 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
926 DECL_SIZE_UNIT (new_decl) = size;
930 /* The callback for remap_decl. Search all containing contexts for a
931 mapping of the variable; this avoids having to duplicate the splay
932 tree ahead of time. We know a mapping doesn't already exist in the
933 given context. Create new mappings to implement default semantics. */
935 static tree
936 omp_copy_decl (tree var, copy_body_data *cb)
938 omp_context *ctx = (omp_context *) cb;
939 tree new_var;
941 if (TREE_CODE (var) == LABEL_DECL)
943 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
944 return var;
945 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
946 DECL_CONTEXT (new_var) = current_function_decl;
947 insert_decl_map (&ctx->cb, var, new_var);
948 return new_var;
951 while (!is_taskreg_ctx (ctx))
953 ctx = ctx->outer;
954 if (ctx == NULL)
955 return var;
956 new_var = maybe_lookup_decl (var, ctx);
957 if (new_var)
958 return new_var;
961 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
962 return var;
964 return error_mark_node;
967 /* Create a new context, with OUTER_CTX being the surrounding context. */
969 static omp_context *
970 new_omp_context (gimple *stmt, omp_context *outer_ctx)
972 omp_context *ctx = XCNEW (omp_context);
974 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
975 (splay_tree_value) ctx);
976 ctx->stmt = stmt;
978 if (outer_ctx)
980 ctx->outer = outer_ctx;
981 ctx->cb = outer_ctx->cb;
982 ctx->cb.block = NULL;
983 ctx->depth = outer_ctx->depth + 1;
985 else
987 ctx->cb.src_fn = current_function_decl;
988 ctx->cb.dst_fn = current_function_decl;
989 ctx->cb.src_node = cgraph_node::get (current_function_decl);
990 gcc_checking_assert (ctx->cb.src_node);
991 ctx->cb.dst_node = ctx->cb.src_node;
992 ctx->cb.src_cfun = cfun;
993 ctx->cb.copy_decl = omp_copy_decl;
994 ctx->cb.eh_lp_nr = 0;
995 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
996 ctx->cb.adjust_array_error_bounds = true;
997 ctx->cb.dont_remap_vla_if_no_change = true;
998 ctx->depth = 1;
1001 ctx->cb.decl_map = new hash_map<tree, tree>;
1003 return ctx;
1006 static gimple_seq maybe_catch_exception (gimple_seq);
1008 /* Finalize task copyfn. */
1010 static void
1011 finalize_task_copyfn (gomp_task *task_stmt)
1013 struct function *child_cfun;
1014 tree child_fn;
1015 gimple_seq seq = NULL, new_seq;
1016 gbind *bind;
1018 child_fn = gimple_omp_task_copy_fn (task_stmt);
1019 if (child_fn == NULL_TREE)
1020 return;
1022 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
1023 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
1025 push_cfun (child_cfun);
1026 bind = gimplify_body (child_fn, false);
1027 gimple_seq_add_stmt (&seq, bind);
1028 new_seq = maybe_catch_exception (seq);
1029 if (new_seq != seq)
1031 bind = gimple_build_bind (NULL, new_seq, NULL);
1032 seq = NULL;
1033 gimple_seq_add_stmt (&seq, bind);
1035 gimple_set_body (child_fn, seq);
1036 pop_cfun ();
1038 /* Inform the callgraph about the new function. */
1039 cgraph_node *node = cgraph_node::get_create (child_fn);
1040 node->parallelized_function = 1;
1041 cgraph_node::add_new_function (child_fn, false);
1044 /* Destroy a omp_context data structures. Called through the splay tree
1045 value delete callback. */
1047 static void
1048 delete_omp_context (splay_tree_value value)
1050 omp_context *ctx = (omp_context *) value;
1052 delete ctx->cb.decl_map;
1054 if (ctx->field_map)
1055 splay_tree_delete (ctx->field_map);
1056 if (ctx->sfield_map)
1057 splay_tree_delete (ctx->sfield_map);
1059 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1060 it produces corrupt debug information. */
1061 if (ctx->record_type)
1063 tree t;
1064 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1065 DECL_ABSTRACT_ORIGIN (t) = NULL;
1067 if (ctx->srecord_type)
1069 tree t;
1070 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1071 DECL_ABSTRACT_ORIGIN (t) = NULL;
1074 if (ctx->task_reduction_map)
1076 ctx->task_reductions.release ();
1077 delete ctx->task_reduction_map;
1080 delete ctx->lastprivate_conditional_map;
1081 delete ctx->allocate_map;
1083 XDELETE (ctx);
1086 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1087 context. */
1089 static void
1090 fixup_child_record_type (omp_context *ctx)
1092 tree f, type = ctx->record_type;
1094 if (!ctx->receiver_decl)
1095 return;
1096 /* ??? It isn't sufficient to just call remap_type here, because
1097 variably_modified_type_p doesn't work the way we expect for
1098 record types. Testing each field for whether it needs remapping
1099 and creating a new record by hand works, however. */
1100 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1101 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1102 break;
1103 if (f)
1105 tree name, new_fields = NULL;
1107 type = lang_hooks.types.make_type (RECORD_TYPE);
1108 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1109 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1110 TYPE_DECL, name, type);
1111 TYPE_NAME (type) = name;
1113 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1115 tree new_f = copy_node (f);
1116 DECL_CONTEXT (new_f) = type;
1117 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1118 DECL_CHAIN (new_f) = new_fields;
1119 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1120 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1121 &ctx->cb, NULL);
1122 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1123 &ctx->cb, NULL);
1124 new_fields = new_f;
1126 /* Arrange to be able to look up the receiver field
1127 given the sender field. */
1128 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1129 (splay_tree_value) new_f);
1131 TYPE_FIELDS (type) = nreverse (new_fields);
1132 layout_type (type);
1135 /* In a target region we never modify any of the pointers in *.omp_data_i,
1136 so attempt to help the optimizers. */
1137 if (is_gimple_omp_offloaded (ctx->stmt))
1138 type = build_qualified_type (type, TYPE_QUAL_CONST);
1140 TREE_TYPE (ctx->receiver_decl)
1141 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1144 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1145 specified by CLAUSES. */
1147 static void
1148 scan_sharing_clauses (tree clauses, omp_context *ctx)
1150 tree c, decl;
1151 bool scan_array_reductions = false;
1153 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1154 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
1155 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1156 /* omp_default_mem_alloc is 1 */
1157 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1158 || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
1160 /* The allocate clauses that appear on a target construct or on
1161 constructs in a target region must specify an allocator expression
1162 unless a requires directive with the dynamic_allocators clause
1163 is present in the same compilation unit. */
1164 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1165 && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS) == 0)
1166 && omp_maybe_offloaded_ctx (ctx))
1167 error_at (OMP_CLAUSE_LOCATION (c), "%<allocate%> clause must"
1168 " specify an allocator here");
1169 if (ctx->allocate_map == NULL)
1170 ctx->allocate_map = new hash_map<tree, tree>;
1171 tree val = integer_zero_node;
1172 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1173 val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
1174 if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
1175 val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
1176 ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
1179 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1181 bool by_ref;
1183 switch (OMP_CLAUSE_CODE (c))
1185 case OMP_CLAUSE_PRIVATE:
1186 decl = OMP_CLAUSE_DECL (c);
1187 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1188 goto do_private;
1189 else if (!is_variable_sized (decl))
1190 install_var_local (decl, ctx);
1191 break;
1193 case OMP_CLAUSE_SHARED:
1194 decl = OMP_CLAUSE_DECL (c);
1195 if (ctx->allocate_map && ctx->allocate_map->get (decl))
1196 ctx->allocate_map->remove (decl);
1197 /* Ignore shared directives in teams construct inside of
1198 target construct. */
1199 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1200 && !is_host_teams_ctx (ctx))
1202 /* Global variables don't need to be copied,
1203 the receiver side will use them directly. */
1204 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1205 if (is_global_var (odecl))
1206 break;
1207 insert_decl_map (&ctx->cb, decl, odecl);
1208 break;
1210 gcc_assert (is_taskreg_ctx (ctx));
1211 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1212 || !is_variable_sized (decl));
1213 /* Global variables don't need to be copied,
1214 the receiver side will use them directly. */
1215 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1216 break;
1217 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1219 use_pointer_for_field (decl, ctx);
1220 break;
1222 by_ref = use_pointer_for_field (decl, NULL);
1223 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1224 || TREE_ADDRESSABLE (decl)
1225 || by_ref
1226 || omp_privatize_by_reference (decl))
1228 by_ref = use_pointer_for_field (decl, ctx);
1229 install_var_field (decl, by_ref, 3, ctx);
1230 install_var_local (decl, ctx);
1231 break;
1233 /* We don't need to copy const scalar vars back. */
1234 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1235 goto do_private;
1237 case OMP_CLAUSE_REDUCTION:
1238 /* Collect 'reduction' clauses on OpenACC compute construct. */
1239 if (is_gimple_omp_oacc (ctx->stmt)
1240 && is_gimple_omp_offloaded (ctx->stmt))
1242 /* No 'reduction' clauses on OpenACC 'kernels'. */
1243 gcc_checking_assert (!is_oacc_kernels (ctx));
1244 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1245 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
1247 ctx->local_reduction_clauses
1248 = tree_cons (NULL, c, ctx->local_reduction_clauses);
1250 /* FALLTHRU */
1252 case OMP_CLAUSE_IN_REDUCTION:
1253 decl = OMP_CLAUSE_DECL (c);
1254 if (ctx->allocate_map
1255 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1256 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
1257 || OMP_CLAUSE_REDUCTION_TASK (c)))
1258 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1259 || is_task_ctx (ctx)))
1261 /* For now. */
1262 if (ctx->allocate_map->get (decl))
1263 ctx->allocate_map->remove (decl);
1265 if (TREE_CODE (decl) == MEM_REF)
1267 tree t = TREE_OPERAND (decl, 0);
1268 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1269 t = TREE_OPERAND (t, 0);
1270 if (TREE_CODE (t) == INDIRECT_REF
1271 || TREE_CODE (t) == ADDR_EXPR)
1272 t = TREE_OPERAND (t, 0);
1273 if (is_omp_target (ctx->stmt))
1275 if (is_variable_sized (t))
1277 gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
1278 t = DECL_VALUE_EXPR (t);
1279 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
1280 t = TREE_OPERAND (t, 0);
1281 gcc_assert (DECL_P (t));
1283 tree at = t;
1284 if (ctx->outer)
1285 scan_omp_op (&at, ctx->outer);
1286 tree nt = omp_copy_decl_1 (at, ctx->outer);
1287 splay_tree_insert (ctx->field_map,
1288 (splay_tree_key) &DECL_CONTEXT (t),
1289 (splay_tree_value) nt);
1290 if (at != t)
1291 splay_tree_insert (ctx->field_map,
1292 (splay_tree_key) &DECL_CONTEXT (at),
1293 (splay_tree_value) nt);
1294 break;
1296 install_var_local (t, ctx);
1297 if (is_taskreg_ctx (ctx)
1298 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1299 || (is_task_ctx (ctx)
1300 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1301 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1302 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1303 == POINTER_TYPE)))))
1304 && !is_variable_sized (t)
1305 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1306 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1307 && !is_task_ctx (ctx))))
1309 by_ref = use_pointer_for_field (t, NULL);
1310 if (is_task_ctx (ctx)
1311 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1312 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1314 install_var_field (t, false, 1, ctx);
1315 install_var_field (t, by_ref, 2, ctx);
1317 else
1318 install_var_field (t, by_ref, 3, ctx);
1320 break;
1322 if (is_omp_target (ctx->stmt))
1324 tree at = decl;
1325 if (ctx->outer)
1326 scan_omp_op (&at, ctx->outer);
1327 tree nt = omp_copy_decl_1 (at, ctx->outer);
1328 splay_tree_insert (ctx->field_map,
1329 (splay_tree_key) &DECL_CONTEXT (decl),
1330 (splay_tree_value) nt);
1331 if (at != decl)
1332 splay_tree_insert (ctx->field_map,
1333 (splay_tree_key) &DECL_CONTEXT (at),
1334 (splay_tree_value) nt);
1335 break;
1337 if (is_task_ctx (ctx)
1338 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1339 && OMP_CLAUSE_REDUCTION_TASK (c)
1340 && is_parallel_ctx (ctx)))
1342 /* Global variables don't need to be copied,
1343 the receiver side will use them directly. */
1344 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1346 by_ref = use_pointer_for_field (decl, ctx);
1347 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1348 install_var_field (decl, by_ref, 3, ctx);
1350 install_var_local (decl, ctx);
1351 break;
1353 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1354 && OMP_CLAUSE_REDUCTION_TASK (c))
1356 install_var_local (decl, ctx);
1357 break;
1359 goto do_private;
1361 case OMP_CLAUSE_LASTPRIVATE:
1362 /* Let the corresponding firstprivate clause create
1363 the variable. */
1364 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1365 break;
1366 /* FALLTHRU */
1368 case OMP_CLAUSE_FIRSTPRIVATE:
1369 case OMP_CLAUSE_LINEAR:
1370 decl = OMP_CLAUSE_DECL (c);
1371 do_private:
1372 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1373 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
1374 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1375 && is_gimple_omp_offloaded (ctx->stmt))
1377 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1378 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR
1379 && lang_hooks.decls.omp_array_data (decl, true)))
1381 by_ref = !omp_privatize_by_reference (decl);
1382 install_var_field (decl, by_ref, 3, ctx);
1384 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1386 if (TREE_CODE (decl) == INDIRECT_REF)
1387 decl = TREE_OPERAND (decl, 0);
1388 install_var_field (decl, true, 3, ctx);
1390 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1391 install_var_field (decl, true, 3, ctx);
1392 else
1393 install_var_field (decl, false, 3, ctx);
1395 if (is_variable_sized (decl))
1397 if (is_task_ctx (ctx))
1399 if (ctx->allocate_map
1400 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1402 /* For now. */
1403 if (ctx->allocate_map->get (decl))
1404 ctx->allocate_map->remove (decl);
1406 install_var_field (decl, false, 1, ctx);
1408 break;
1410 else if (is_taskreg_ctx (ctx))
1412 bool global
1413 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1414 by_ref = use_pointer_for_field (decl, NULL);
1416 if (is_task_ctx (ctx)
1417 && (global || by_ref || omp_privatize_by_reference (decl)))
1419 if (ctx->allocate_map
1420 && ctx->allocate_map->get (decl))
1421 install_var_field (decl, by_ref, 32 | 1, ctx);
1422 else
1423 install_var_field (decl, false, 1, ctx);
1424 if (!global)
1425 install_var_field (decl, by_ref, 2, ctx);
1427 else if (!global)
1428 install_var_field (decl, by_ref, 3, ctx);
1430 install_var_local (decl, ctx);
1431 /* For descr arrays on target: firstprivatize data + attach ptr. */
1432 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1433 && is_gimple_omp_offloaded (ctx->stmt)
1434 && !is_gimple_omp_oacc (ctx->stmt)
1435 && lang_hooks.decls.omp_array_data (decl, true))
1437 install_var_field (decl, false, 16 | 3, ctx);
1438 install_var_field (decl, true, 8 | 3, ctx);
1440 break;
1442 case OMP_CLAUSE_USE_DEVICE_PTR:
1443 case OMP_CLAUSE_USE_DEVICE_ADDR:
1444 decl = OMP_CLAUSE_DECL (c);
1446 /* Fortran array descriptors. */
1447 if (lang_hooks.decls.omp_array_data (decl, true))
1448 install_var_field (decl, false, 19, ctx);
1449 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1450 && !omp_privatize_by_reference (decl)
1451 && !omp_is_allocatable_or_ptr (decl))
1452 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1453 install_var_field (decl, true, 11, ctx);
1454 else
1455 install_var_field (decl, false, 11, ctx);
1456 if (DECL_SIZE (decl)
1457 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1459 tree decl2 = DECL_VALUE_EXPR (decl);
1460 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1461 decl2 = TREE_OPERAND (decl2, 0);
1462 gcc_assert (DECL_P (decl2));
1463 install_var_local (decl2, ctx);
1465 install_var_local (decl, ctx);
1466 break;
1468 case OMP_CLAUSE_HAS_DEVICE_ADDR:
1469 decl = OMP_CLAUSE_DECL (c);
1470 while (TREE_CODE (decl) == INDIRECT_REF
1471 || TREE_CODE (decl) == ARRAY_REF)
1472 decl = TREE_OPERAND (decl, 0);
1473 goto do_private;
1475 case OMP_CLAUSE_IS_DEVICE_PTR:
1476 decl = OMP_CLAUSE_DECL (c);
1477 goto do_private;
1479 case OMP_CLAUSE__LOOPTEMP_:
1480 case OMP_CLAUSE__REDUCTEMP_:
1481 gcc_assert (is_taskreg_ctx (ctx));
1482 decl = OMP_CLAUSE_DECL (c);
1483 install_var_field (decl, false, 3, ctx);
1484 install_var_local (decl, ctx);
1485 break;
1487 case OMP_CLAUSE_COPYPRIVATE:
1488 case OMP_CLAUSE_COPYIN:
1489 decl = OMP_CLAUSE_DECL (c);
1490 by_ref = use_pointer_for_field (decl, NULL);
1491 install_var_field (decl, by_ref, 3, ctx);
1492 break;
1494 case OMP_CLAUSE_FINAL:
1495 case OMP_CLAUSE_IF:
1496 case OMP_CLAUSE_NUM_THREADS:
1497 case OMP_CLAUSE_NUM_TEAMS:
1498 case OMP_CLAUSE_THREAD_LIMIT:
1499 case OMP_CLAUSE_DEVICE:
1500 case OMP_CLAUSE_SCHEDULE:
1501 case OMP_CLAUSE_DIST_SCHEDULE:
1502 case OMP_CLAUSE_DEPEND:
1503 case OMP_CLAUSE_PRIORITY:
1504 case OMP_CLAUSE_GRAINSIZE:
1505 case OMP_CLAUSE_NUM_TASKS:
1506 case OMP_CLAUSE_NUM_GANGS:
1507 case OMP_CLAUSE_NUM_WORKERS:
1508 case OMP_CLAUSE_VECTOR_LENGTH:
1509 case OMP_CLAUSE_DETACH:
1510 case OMP_CLAUSE_FILTER:
1511 if (ctx->outer)
1512 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1513 break;
1515 case OMP_CLAUSE_TO:
1516 case OMP_CLAUSE_FROM:
1517 case OMP_CLAUSE_MAP:
1518 if (ctx->outer)
1519 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1520 decl = OMP_CLAUSE_DECL (c);
1521 /* If requested, make 'decl' addressable. */
1522 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1523 && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c))
1525 gcc_checking_assert (DECL_P (decl));
1527 bool decl_addressable = TREE_ADDRESSABLE (decl);
1528 if (!decl_addressable)
1530 if (!make_addressable_vars)
1531 make_addressable_vars = BITMAP_ALLOC (NULL);
1532 bitmap_set_bit (make_addressable_vars, DECL_UID (decl));
1533 TREE_ADDRESSABLE (decl) = 1;
1536 if (dump_enabled_p ())
1538 location_t loc = OMP_CLAUSE_LOCATION (c);
1539 const dump_user_location_t d_u_loc
1540 = dump_user_location_t::from_location_t (loc);
1541 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
1542 #if __GNUC__ >= 10
1543 # pragma GCC diagnostic push
1544 # pragma GCC diagnostic ignored "-Wformat"
1545 #endif
1546 if (!decl_addressable)
1547 dump_printf_loc (MSG_NOTE, d_u_loc,
1548 "variable %<%T%>"
1549 " made addressable\n",
1550 decl);
1551 else
1552 dump_printf_loc (MSG_NOTE, d_u_loc,
1553 "variable %<%T%>"
1554 " already made addressable\n",
1555 decl);
1556 #if __GNUC__ >= 10
1557 # pragma GCC diagnostic pop
1558 #endif
1561 /* Done. */
1562 OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c) = 0;
1564 /* Global variables with "omp declare target" attribute
1565 don't need to be copied, the receiver side will use them
1566 directly. However, global variables with "omp declare target link"
1567 attribute need to be copied. Or when ALWAYS modifier is used. */
1568 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1569 && DECL_P (decl)
1570 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1571 && (OMP_CLAUSE_MAP_KIND (c)
1572 != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
1573 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
1574 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
1575 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1576 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1577 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1578 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1579 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
1580 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1581 && varpool_node::get_create (decl)->offloadable
1582 && !lookup_attribute ("omp declare target link",
1583 DECL_ATTRIBUTES (decl)))
1584 break;
1585 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1586 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1588 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1589 not offloaded; there is nothing to map for those. */
1590 if (!is_gimple_omp_offloaded (ctx->stmt)
1591 && !POINTER_TYPE_P (TREE_TYPE (decl))
1592 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1593 break;
1595 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1596 && DECL_P (decl)
1597 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1598 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1599 && is_omp_target (ctx->stmt))
1601 /* If this is an offloaded region, an attach operation should
1602 only exist when the pointer variable is mapped in a prior
1603 clause.
1604 If we had an error, we may not have attempted to sort clauses
1605 properly, so avoid the test. */
1606 if (is_gimple_omp_offloaded (ctx->stmt)
1607 && !seen_error ())
1608 gcc_assert
1609 (maybe_lookup_decl (decl, ctx)
1610 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1611 && lookup_attribute ("omp declare target",
1612 DECL_ATTRIBUTES (decl))));
1614 /* By itself, attach/detach is generated as part of pointer
1615 variable mapping and should not create new variables in the
1616 offloaded region, however sender refs for it must be created
1617 for its address to be passed to the runtime. */
1618 tree field
1619 = build_decl (OMP_CLAUSE_LOCATION (c),
1620 FIELD_DECL, NULL_TREE, ptr_type_node);
1621 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1622 insert_field_into_struct (ctx->record_type, field);
1623 /* To not clash with a map of the pointer variable itself,
1624 attach/detach maps have their field looked up by the *clause*
1625 tree expression, not the decl. */
1626 gcc_assert (!splay_tree_lookup (ctx->field_map,
1627 (splay_tree_key) c));
1628 splay_tree_insert (ctx->field_map, (splay_tree_key) c,
1629 (splay_tree_value) field);
1630 break;
1632 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1633 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1634 || (OMP_CLAUSE_MAP_KIND (c)
1635 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1637 if (TREE_CODE (decl) == COMPONENT_REF
1638 || (TREE_CODE (decl) == INDIRECT_REF
1639 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1640 && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1641 == REFERENCE_TYPE)
1642 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1643 == POINTER_TYPE)))))
1644 break;
1645 if (DECL_SIZE (decl)
1646 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1648 tree decl2 = DECL_VALUE_EXPR (decl);
1649 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1650 decl2 = TREE_OPERAND (decl2, 0);
1651 gcc_assert (DECL_P (decl2));
1652 install_var_local (decl2, ctx);
1654 install_var_local (decl, ctx);
1655 break;
1657 if (DECL_P (decl))
1659 if (DECL_SIZE (decl)
1660 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1662 tree decl2 = DECL_VALUE_EXPR (decl);
1663 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1664 decl2 = TREE_OPERAND (decl2, 0);
1665 gcc_assert (DECL_P (decl2));
1666 install_var_field (decl2, true, 3, ctx);
1667 install_var_local (decl2, ctx);
1668 install_var_local (decl, ctx);
1670 else
1672 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1673 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1674 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1675 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1676 install_var_field (decl, true, 7, ctx);
1677 else
1678 install_var_field (decl, true, 3, ctx);
1679 if (is_gimple_omp_offloaded (ctx->stmt)
1680 && !(is_gimple_omp_oacc (ctx->stmt)
1681 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
1682 install_var_local (decl, ctx);
1685 else
1687 tree base = get_base_address (decl);
1688 tree nc = OMP_CLAUSE_CHAIN (c);
1689 if (DECL_P (base)
1690 && nc != NULL_TREE
1691 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1692 && OMP_CLAUSE_DECL (nc) == base
1693 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1694 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1696 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1697 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1699 else
1701 if (ctx->outer)
1703 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1704 decl = OMP_CLAUSE_DECL (c);
1706 gcc_assert (!splay_tree_lookup (ctx->field_map,
1707 (splay_tree_key) decl));
1708 tree field
1709 = build_decl (OMP_CLAUSE_LOCATION (c),
1710 FIELD_DECL, NULL_TREE, ptr_type_node);
1711 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1712 insert_field_into_struct (ctx->record_type, field);
1713 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1714 (splay_tree_value) field);
1717 break;
1719 case OMP_CLAUSE_ORDER:
1720 ctx->order_concurrent = true;
1721 break;
1723 case OMP_CLAUSE_BIND:
1724 ctx->loop_p = true;
1725 break;
1727 case OMP_CLAUSE_NOWAIT:
1728 case OMP_CLAUSE_ORDERED:
1729 case OMP_CLAUSE_COLLAPSE:
1730 case OMP_CLAUSE_UNTIED:
1731 case OMP_CLAUSE_MERGEABLE:
1732 case OMP_CLAUSE_PROC_BIND:
1733 case OMP_CLAUSE_SAFELEN:
1734 case OMP_CLAUSE_SIMDLEN:
1735 case OMP_CLAUSE_THREADS:
1736 case OMP_CLAUSE_SIMD:
1737 case OMP_CLAUSE_NOGROUP:
1738 case OMP_CLAUSE_DEFAULTMAP:
1739 case OMP_CLAUSE_ASYNC:
1740 case OMP_CLAUSE_WAIT:
1741 case OMP_CLAUSE_GANG:
1742 case OMP_CLAUSE_WORKER:
1743 case OMP_CLAUSE_VECTOR:
1744 case OMP_CLAUSE_INDEPENDENT:
1745 case OMP_CLAUSE_AUTO:
1746 case OMP_CLAUSE_SEQ:
1747 case OMP_CLAUSE_TILE:
1748 case OMP_CLAUSE__SIMT_:
1749 case OMP_CLAUSE_DEFAULT:
1750 case OMP_CLAUSE_NONTEMPORAL:
1751 case OMP_CLAUSE_IF_PRESENT:
1752 case OMP_CLAUSE_FINALIZE:
1753 case OMP_CLAUSE_TASK_REDUCTION:
1754 case OMP_CLAUSE_ALLOCATE:
1755 break;
1757 case OMP_CLAUSE_ALIGNED:
1758 decl = OMP_CLAUSE_DECL (c);
1759 if (is_global_var (decl)
1760 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1761 install_var_local (decl, ctx);
1762 break;
1764 case OMP_CLAUSE__CONDTEMP_:
1765 decl = OMP_CLAUSE_DECL (c);
1766 if (is_parallel_ctx (ctx))
1768 install_var_field (decl, false, 3, ctx);
1769 install_var_local (decl, ctx);
1771 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1772 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1773 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1774 install_var_local (decl, ctx);
1775 break;
1777 case OMP_CLAUSE__CACHE_:
1778 case OMP_CLAUSE_NOHOST:
1779 default:
1780 gcc_unreachable ();
1784 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1786 switch (OMP_CLAUSE_CODE (c))
1788 case OMP_CLAUSE_LASTPRIVATE:
1789 /* Let the corresponding firstprivate clause create
1790 the variable. */
1791 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1792 scan_array_reductions = true;
1793 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1794 break;
1795 /* FALLTHRU */
1797 case OMP_CLAUSE_FIRSTPRIVATE:
1798 case OMP_CLAUSE_PRIVATE:
1799 case OMP_CLAUSE_LINEAR:
1800 case OMP_CLAUSE_HAS_DEVICE_ADDR:
1801 case OMP_CLAUSE_IS_DEVICE_PTR:
1802 decl = OMP_CLAUSE_DECL (c);
1803 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1805 while (TREE_CODE (decl) == INDIRECT_REF
1806 || TREE_CODE (decl) == ARRAY_REF)
1807 decl = TREE_OPERAND (decl, 0);
1810 if (is_variable_sized (decl))
1812 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1813 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
1814 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1815 && is_gimple_omp_offloaded (ctx->stmt))
1817 tree decl2 = DECL_VALUE_EXPR (decl);
1818 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1819 decl2 = TREE_OPERAND (decl2, 0);
1820 gcc_assert (DECL_P (decl2));
1821 install_var_local (decl2, ctx);
1822 fixup_remapped_decl (decl2, ctx, false);
1824 install_var_local (decl, ctx);
1826 fixup_remapped_decl (decl, ctx,
1827 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1828 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1829 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1830 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1831 scan_array_reductions = true;
1832 break;
1834 case OMP_CLAUSE_REDUCTION:
1835 case OMP_CLAUSE_IN_REDUCTION:
1836 decl = OMP_CLAUSE_DECL (c);
1837 if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
1839 if (is_variable_sized (decl))
1840 install_var_local (decl, ctx);
1841 fixup_remapped_decl (decl, ctx, false);
1843 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1844 scan_array_reductions = true;
1845 break;
1847 case OMP_CLAUSE_TASK_REDUCTION:
1848 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1849 scan_array_reductions = true;
1850 break;
1852 case OMP_CLAUSE_SHARED:
1853 /* Ignore shared directives in teams construct inside of
1854 target construct. */
1855 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1856 && !is_host_teams_ctx (ctx))
1857 break;
1858 decl = OMP_CLAUSE_DECL (c);
1859 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1860 break;
1861 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1863 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1864 ctx->outer)))
1865 break;
1866 bool by_ref = use_pointer_for_field (decl, ctx);
1867 install_var_field (decl, by_ref, 11, ctx);
1868 break;
1870 fixup_remapped_decl (decl, ctx, false);
1871 break;
1873 case OMP_CLAUSE_MAP:
1874 if (!is_gimple_omp_offloaded (ctx->stmt))
1875 break;
1876 decl = OMP_CLAUSE_DECL (c);
1877 if (DECL_P (decl)
1878 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1879 && (OMP_CLAUSE_MAP_KIND (c)
1880 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1881 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1882 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1883 && varpool_node::get_create (decl)->offloadable)
1884 break;
1885 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1886 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1887 && is_omp_target (ctx->stmt)
1888 && !is_gimple_omp_offloaded (ctx->stmt))
1889 break;
1890 if (DECL_P (decl))
1892 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1893 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1894 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1895 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1897 tree new_decl = lookup_decl (decl, ctx);
1898 TREE_TYPE (new_decl)
1899 = remap_type (TREE_TYPE (decl), &ctx->cb);
1901 else if (DECL_SIZE (decl)
1902 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1904 tree decl2 = DECL_VALUE_EXPR (decl);
1905 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1906 decl2 = TREE_OPERAND (decl2, 0);
1907 gcc_assert (DECL_P (decl2));
1908 fixup_remapped_decl (decl2, ctx, false);
1909 fixup_remapped_decl (decl, ctx, true);
1911 else
1912 fixup_remapped_decl (decl, ctx, false);
1914 break;
1916 case OMP_CLAUSE_COPYPRIVATE:
1917 case OMP_CLAUSE_COPYIN:
1918 case OMP_CLAUSE_DEFAULT:
1919 case OMP_CLAUSE_IF:
1920 case OMP_CLAUSE_NUM_THREADS:
1921 case OMP_CLAUSE_NUM_TEAMS:
1922 case OMP_CLAUSE_THREAD_LIMIT:
1923 case OMP_CLAUSE_DEVICE:
1924 case OMP_CLAUSE_SCHEDULE:
1925 case OMP_CLAUSE_DIST_SCHEDULE:
1926 case OMP_CLAUSE_NOWAIT:
1927 case OMP_CLAUSE_ORDERED:
1928 case OMP_CLAUSE_COLLAPSE:
1929 case OMP_CLAUSE_UNTIED:
1930 case OMP_CLAUSE_FINAL:
1931 case OMP_CLAUSE_MERGEABLE:
1932 case OMP_CLAUSE_PROC_BIND:
1933 case OMP_CLAUSE_SAFELEN:
1934 case OMP_CLAUSE_SIMDLEN:
1935 case OMP_CLAUSE_ALIGNED:
1936 case OMP_CLAUSE_DEPEND:
1937 case OMP_CLAUSE_DETACH:
1938 case OMP_CLAUSE_ALLOCATE:
1939 case OMP_CLAUSE__LOOPTEMP_:
1940 case OMP_CLAUSE__REDUCTEMP_:
1941 case OMP_CLAUSE_TO:
1942 case OMP_CLAUSE_FROM:
1943 case OMP_CLAUSE_PRIORITY:
1944 case OMP_CLAUSE_GRAINSIZE:
1945 case OMP_CLAUSE_NUM_TASKS:
1946 case OMP_CLAUSE_THREADS:
1947 case OMP_CLAUSE_SIMD:
1948 case OMP_CLAUSE_NOGROUP:
1949 case OMP_CLAUSE_DEFAULTMAP:
1950 case OMP_CLAUSE_ORDER:
1951 case OMP_CLAUSE_BIND:
1952 case OMP_CLAUSE_USE_DEVICE_PTR:
1953 case OMP_CLAUSE_USE_DEVICE_ADDR:
1954 case OMP_CLAUSE_NONTEMPORAL:
1955 case OMP_CLAUSE_ASYNC:
1956 case OMP_CLAUSE_WAIT:
1957 case OMP_CLAUSE_NUM_GANGS:
1958 case OMP_CLAUSE_NUM_WORKERS:
1959 case OMP_CLAUSE_VECTOR_LENGTH:
1960 case OMP_CLAUSE_GANG:
1961 case OMP_CLAUSE_WORKER:
1962 case OMP_CLAUSE_VECTOR:
1963 case OMP_CLAUSE_INDEPENDENT:
1964 case OMP_CLAUSE_AUTO:
1965 case OMP_CLAUSE_SEQ:
1966 case OMP_CLAUSE_TILE:
1967 case OMP_CLAUSE__SIMT_:
1968 case OMP_CLAUSE_IF_PRESENT:
1969 case OMP_CLAUSE_FINALIZE:
1970 case OMP_CLAUSE_FILTER:
1971 case OMP_CLAUSE__CONDTEMP_:
1972 break;
1974 case OMP_CLAUSE__CACHE_:
1975 case OMP_CLAUSE_NOHOST:
1976 default:
1977 gcc_unreachable ();
1981 gcc_checking_assert (!scan_array_reductions
1982 || !is_gimple_omp_oacc (ctx->stmt));
1983 if (scan_array_reductions)
1985 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1986 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1987 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1988 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1989 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1991 omp_context *rctx = ctx;
1992 if (is_omp_target (ctx->stmt))
1993 rctx = ctx->outer;
1994 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
1995 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
1997 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1998 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1999 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
2000 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
2001 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
2002 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
2006 /* Create a new name for omp child function. Returns an identifier. */
2008 static tree
2009 create_omp_child_function_name (bool task_copy)
2011 return clone_function_name_numbered (current_function_decl,
2012 task_copy ? "_omp_cpyfn" : "_omp_fn");
2015 /* Return true if CTX may belong to offloaded code: either if current function
2016 is offloaded, or any enclosing context corresponds to a target region. */
2018 static bool
2019 omp_maybe_offloaded_ctx (omp_context *ctx)
2021 if (cgraph_node::get (current_function_decl)->offloadable)
2022 return true;
2023 for (; ctx; ctx = ctx->outer)
2024 if (is_gimple_omp_offloaded (ctx->stmt))
2025 return true;
2026 return false;
2029 /* Build a decl for the omp child function. It'll not contain a body
2030 yet, just the bare decl. */
2032 static void
2033 create_omp_child_function (omp_context *ctx, bool task_copy)
2035 tree decl, type, name, t;
2037 name = create_omp_child_function_name (task_copy);
2038 if (task_copy)
2039 type = build_function_type_list (void_type_node, ptr_type_node,
2040 ptr_type_node, NULL_TREE);
2041 else
2042 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2044 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
2046 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
2047 || !task_copy);
2048 if (!task_copy)
2049 ctx->cb.dst_fn = decl;
2050 else
2051 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
2053 TREE_STATIC (decl) = 1;
2054 TREE_USED (decl) = 1;
2055 DECL_ARTIFICIAL (decl) = 1;
2056 DECL_IGNORED_P (decl) = 0;
2057 TREE_PUBLIC (decl) = 0;
2058 DECL_UNINLINABLE (decl) = 1;
2059 DECL_EXTERNAL (decl) = 0;
2060 DECL_CONTEXT (decl) = NULL_TREE;
2061 DECL_INITIAL (decl) = make_node (BLOCK);
2062 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
2063 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
2064 /* Remove omp declare simd attribute from the new attributes. */
2065 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
2067 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
2068 a = a2;
2069 a = TREE_CHAIN (a);
2070 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
2071 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
2072 *p = TREE_CHAIN (*p);
2073 else
2075 tree chain = TREE_CHAIN (*p);
2076 *p = copy_node (*p);
2077 p = &TREE_CHAIN (*p);
2078 *p = chain;
2081 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
2082 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
2083 DECL_FUNCTION_SPECIFIC_TARGET (decl)
2084 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
2085 DECL_FUNCTION_VERSIONED (decl)
2086 = DECL_FUNCTION_VERSIONED (current_function_decl);
2088 if (omp_maybe_offloaded_ctx (ctx))
2090 cgraph_node::get_create (decl)->offloadable = 1;
2091 if (ENABLE_OFFLOADING)
2092 g->have_offload = true;
2095 if (cgraph_node::get_create (decl)->offloadable)
2097 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
2098 ? "omp target entrypoint"
2099 : "omp declare target");
2100 if (lookup_attribute ("omp declare target",
2101 DECL_ATTRIBUTES (current_function_decl)))
2103 if (is_gimple_omp_offloaded (ctx->stmt))
2104 DECL_ATTRIBUTES (decl)
2105 = remove_attribute ("omp declare target",
2106 copy_list (DECL_ATTRIBUTES (decl)));
2107 else
2108 target_attr = NULL;
2110 if (target_attr
2111 && is_gimple_omp_offloaded (ctx->stmt)
2112 && lookup_attribute ("noclone", DECL_ATTRIBUTES (decl)) == NULL_TREE)
2113 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("noclone"),
2114 NULL_TREE, DECL_ATTRIBUTES (decl));
2115 if (target_attr)
2116 DECL_ATTRIBUTES (decl)
2117 = tree_cons (get_identifier (target_attr),
2118 NULL_TREE, DECL_ATTRIBUTES (decl));
2121 t = build_decl (DECL_SOURCE_LOCATION (decl),
2122 RESULT_DECL, NULL_TREE, void_type_node);
2123 DECL_ARTIFICIAL (t) = 1;
2124 DECL_IGNORED_P (t) = 1;
2125 DECL_CONTEXT (t) = decl;
2126 DECL_RESULT (decl) = t;
2128 tree data_name = get_identifier (".omp_data_i");
2129 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
2130 ptr_type_node);
2131 DECL_ARTIFICIAL (t) = 1;
2132 DECL_NAMELESS (t) = 1;
2133 DECL_ARG_TYPE (t) = ptr_type_node;
2134 DECL_CONTEXT (t) = current_function_decl;
2135 TREE_USED (t) = 1;
2136 TREE_READONLY (t) = 1;
2137 DECL_ARGUMENTS (decl) = t;
2138 if (!task_copy)
2139 ctx->receiver_decl = t;
2140 else
2142 t = build_decl (DECL_SOURCE_LOCATION (decl),
2143 PARM_DECL, get_identifier (".omp_data_o"),
2144 ptr_type_node);
2145 DECL_ARTIFICIAL (t) = 1;
2146 DECL_NAMELESS (t) = 1;
2147 DECL_ARG_TYPE (t) = ptr_type_node;
2148 DECL_CONTEXT (t) = current_function_decl;
2149 TREE_USED (t) = 1;
2150 TREE_ADDRESSABLE (t) = 1;
2151 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2152 DECL_ARGUMENTS (decl) = t;
2155 /* Allocate memory for the function structure. The call to
2156 allocate_struct_function clobbers CFUN, so we need to restore
2157 it afterward. */
2158 push_struct_function (decl);
2159 cfun->function_end_locus = gimple_location (ctx->stmt);
2160 init_tree_ssa (cfun);
2161 pop_cfun ();
2164 /* Callback for walk_gimple_seq. Check if combined parallel
2165 contains gimple_omp_for_combined_into_p OMP_FOR. */
2167 tree
2168 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2169 bool *handled_ops_p,
2170 struct walk_stmt_info *wi)
2172 gimple *stmt = gsi_stmt (*gsi_p);
2174 *handled_ops_p = true;
2175 switch (gimple_code (stmt))
2177 WALK_SUBSTMTS;
2179 case GIMPLE_OMP_FOR:
2180 if (gimple_omp_for_combined_into_p (stmt)
2181 && gimple_omp_for_kind (stmt)
2182 == *(const enum gf_mask *) (wi->info))
2184 wi->info = stmt;
2185 return integer_zero_node;
2187 break;
2188 default:
2189 break;
2191 return NULL;
2194 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2196 static void
2197 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2198 omp_context *outer_ctx)
2200 struct walk_stmt_info wi;
2202 memset (&wi, 0, sizeof (wi));
2203 wi.val_only = true;
2204 wi.info = (void *) &msk;
2205 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2206 if (wi.info != (void *) &msk)
2208 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2209 struct omp_for_data fd;
2210 omp_extract_for_data (for_stmt, &fd, NULL);
2211 /* We need two temporaries with fd.loop.v type (istart/iend)
2212 and then (fd.collapse - 1) temporaries with the same
2213 type for count2 ... countN-1 vars if not constant. */
2214 size_t count = 2, i;
2215 tree type = fd.iter_type;
2216 if (fd.collapse > 1
2217 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2219 count += fd.collapse - 1;
2220 /* If there are lastprivate clauses on the inner
2221 GIMPLE_OMP_FOR, add one more temporaries for the total number
2222 of iterations (product of count1 ... countN-1). */
2223 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2224 OMP_CLAUSE_LASTPRIVATE)
2225 || (msk == GF_OMP_FOR_KIND_FOR
2226 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2227 OMP_CLAUSE_LASTPRIVATE)))
2229 tree temp = create_tmp_var (type);
2230 tree c = build_omp_clause (UNKNOWN_LOCATION,
2231 OMP_CLAUSE__LOOPTEMP_);
2232 insert_decl_map (&outer_ctx->cb, temp, temp);
2233 OMP_CLAUSE_DECL (c) = temp;
2234 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2235 gimple_omp_taskreg_set_clauses (stmt, c);
2237 if (fd.non_rect
2238 && fd.last_nonrect == fd.first_nonrect + 1)
2239 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2240 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2242 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2243 tree type2 = TREE_TYPE (v);
2244 count++;
2245 for (i = 0; i < 3; i++)
2247 tree temp = create_tmp_var (type2);
2248 tree c = build_omp_clause (UNKNOWN_LOCATION,
2249 OMP_CLAUSE__LOOPTEMP_);
2250 insert_decl_map (&outer_ctx->cb, temp, temp);
2251 OMP_CLAUSE_DECL (c) = temp;
2252 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2253 gimple_omp_taskreg_set_clauses (stmt, c);
2257 for (i = 0; i < count; i++)
2259 tree temp = create_tmp_var (type);
2260 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2261 insert_decl_map (&outer_ctx->cb, temp, temp);
2262 OMP_CLAUSE_DECL (c) = temp;
2263 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2264 gimple_omp_taskreg_set_clauses (stmt, c);
2267 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2268 && omp_find_clause (gimple_omp_task_clauses (stmt),
2269 OMP_CLAUSE_REDUCTION))
2271 tree type = build_pointer_type (pointer_sized_int_node);
2272 tree temp = create_tmp_var (type);
2273 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2274 insert_decl_map (&outer_ctx->cb, temp, temp);
2275 OMP_CLAUSE_DECL (c) = temp;
2276 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2277 gimple_omp_task_set_clauses (stmt, c);
2281 /* Scan an OpenMP parallel directive. */
2283 static void
2284 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2286 omp_context *ctx;
2287 tree name;
2288 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2290 /* Ignore parallel directives with empty bodies, unless there
2291 are copyin clauses. */
2292 if (optimize > 0
2293 && empty_body_p (gimple_omp_body (stmt))
2294 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2295 OMP_CLAUSE_COPYIN) == NULL)
2297 gsi_replace (gsi, gimple_build_nop (), false);
2298 return;
2301 if (gimple_omp_parallel_combined_p (stmt))
2302 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
2303 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2304 OMP_CLAUSE_REDUCTION);
2305 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2306 if (OMP_CLAUSE_REDUCTION_TASK (c))
2308 tree type = build_pointer_type (pointer_sized_int_node);
2309 tree temp = create_tmp_var (type);
2310 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2311 if (outer_ctx)
2312 insert_decl_map (&outer_ctx->cb, temp, temp);
2313 OMP_CLAUSE_DECL (c) = temp;
2314 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2315 gimple_omp_parallel_set_clauses (stmt, c);
2316 break;
2318 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2319 break;
2321 ctx = new_omp_context (stmt, outer_ctx);
2322 taskreg_contexts.safe_push (ctx);
2323 if (taskreg_nesting_level > 1)
2324 ctx->is_nested = true;
2325 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2326 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2327 name = create_tmp_var_name (".omp_data_s");
2328 name = build_decl (gimple_location (stmt),
2329 TYPE_DECL, name, ctx->record_type);
2330 DECL_ARTIFICIAL (name) = 1;
2331 DECL_NAMELESS (name) = 1;
2332 TYPE_NAME (ctx->record_type) = name;
2333 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2334 create_omp_child_function (ctx, false);
2335 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2337 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2338 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2340 if (TYPE_FIELDS (ctx->record_type) == NULL)
2341 ctx->record_type = ctx->receiver_decl = NULL;
2344 /* Scan an OpenMP task directive. */
2346 static void
2347 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2349 omp_context *ctx;
2350 tree name, t;
2351 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2353 /* Ignore task directives with empty bodies, unless they have depend
2354 clause. */
2355 if (optimize > 0
2356 && gimple_omp_body (stmt)
2357 && empty_body_p (gimple_omp_body (stmt))
2358 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2360 gsi_replace (gsi, gimple_build_nop (), false);
2361 return;
2364 if (gimple_omp_task_taskloop_p (stmt))
2365 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2367 ctx = new_omp_context (stmt, outer_ctx);
2369 if (gimple_omp_task_taskwait_p (stmt))
2371 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2372 return;
2375 taskreg_contexts.safe_push (ctx);
2376 if (taskreg_nesting_level > 1)
2377 ctx->is_nested = true;
2378 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2379 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2380 name = create_tmp_var_name (".omp_data_s");
2381 name = build_decl (gimple_location (stmt),
2382 TYPE_DECL, name, ctx->record_type);
2383 DECL_ARTIFICIAL (name) = 1;
2384 DECL_NAMELESS (name) = 1;
2385 TYPE_NAME (ctx->record_type) = name;
2386 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2387 create_omp_child_function (ctx, false);
2388 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2390 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2392 if (ctx->srecord_type)
2394 name = create_tmp_var_name (".omp_data_a");
2395 name = build_decl (gimple_location (stmt),
2396 TYPE_DECL, name, ctx->srecord_type);
2397 DECL_ARTIFICIAL (name) = 1;
2398 DECL_NAMELESS (name) = 1;
2399 TYPE_NAME (ctx->srecord_type) = name;
2400 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2401 create_omp_child_function (ctx, true);
2404 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2406 if (TYPE_FIELDS (ctx->record_type) == NULL)
2408 ctx->record_type = ctx->receiver_decl = NULL;
2409 t = build_int_cst (long_integer_type_node, 0);
2410 gimple_omp_task_set_arg_size (stmt, t);
2411 t = build_int_cst (long_integer_type_node, 1);
2412 gimple_omp_task_set_arg_align (stmt, t);
2416 /* Helper function for finish_taskreg_scan, called through walk_tree.
2417 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2418 tree, replace it in the expression. */
2420 static tree
2421 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2423 if (VAR_P (*tp))
2425 omp_context *ctx = (omp_context *) data;
2426 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2427 if (t != *tp)
2429 if (DECL_HAS_VALUE_EXPR_P (t))
2430 t = unshare_expr (DECL_VALUE_EXPR (t));
2431 *tp = t;
2433 *walk_subtrees = 0;
2435 else if (IS_TYPE_OR_DECL_P (*tp))
2436 *walk_subtrees = 0;
2437 return NULL_TREE;
2440 /* If any decls have been made addressable during scan_omp,
2441 adjust their fields if needed, and layout record types
2442 of parallel/task constructs. */
2444 static void
2445 finish_taskreg_scan (omp_context *ctx)
2447 if (ctx->record_type == NULL_TREE)
2448 return;
2450 /* If any make_addressable_vars were needed, verify all
2451 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2452 statements if use_pointer_for_field hasn't changed
2453 because of that. If it did, update field types now. */
2454 if (make_addressable_vars)
2456 tree c;
2458 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2459 c; c = OMP_CLAUSE_CHAIN (c))
2460 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2461 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2463 tree decl = OMP_CLAUSE_DECL (c);
2465 /* Global variables don't need to be copied,
2466 the receiver side will use them directly. */
2467 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2468 continue;
2469 if (!bitmap_bit_p (make_addressable_vars, DECL_UID (decl))
2470 || !use_pointer_for_field (decl, ctx))
2471 continue;
2472 tree field = lookup_field (decl, ctx);
2473 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2474 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2475 continue;
2476 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2477 TREE_THIS_VOLATILE (field) = 0;
2478 DECL_USER_ALIGN (field) = 0;
2479 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2480 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2481 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2482 if (ctx->srecord_type)
2484 tree sfield = lookup_sfield (decl, ctx);
2485 TREE_TYPE (sfield) = TREE_TYPE (field);
2486 TREE_THIS_VOLATILE (sfield) = 0;
2487 DECL_USER_ALIGN (sfield) = 0;
2488 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2489 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2490 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2495 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2497 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2498 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2499 if (c)
2501 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2502 expects to find it at the start of data. */
2503 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2504 tree *p = &TYPE_FIELDS (ctx->record_type);
2505 while (*p)
2506 if (*p == f)
2508 *p = DECL_CHAIN (*p);
2509 break;
2511 else
2512 p = &DECL_CHAIN (*p);
2513 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2514 TYPE_FIELDS (ctx->record_type) = f;
2516 layout_type (ctx->record_type);
2517 fixup_child_record_type (ctx);
2519 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2521 layout_type (ctx->record_type);
2522 fixup_child_record_type (ctx);
2524 else
2526 location_t loc = gimple_location (ctx->stmt);
2527 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2528 tree detach_clause
2529 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2530 OMP_CLAUSE_DETACH);
2531 /* Move VLA fields to the end. */
2532 p = &TYPE_FIELDS (ctx->record_type);
2533 while (*p)
2534 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2535 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2537 *q = *p;
2538 *p = TREE_CHAIN (*p);
2539 TREE_CHAIN (*q) = NULL_TREE;
2540 q = &TREE_CHAIN (*q);
2542 else
2543 p = &DECL_CHAIN (*p);
2544 *p = vla_fields;
2545 if (gimple_omp_task_taskloop_p (ctx->stmt))
2547 /* Move fields corresponding to first and second _looptemp_
2548 clause first. There are filled by GOMP_taskloop
2549 and thus need to be in specific positions. */
2550 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2551 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2552 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2553 OMP_CLAUSE__LOOPTEMP_);
2554 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2555 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2556 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2557 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2558 p = &TYPE_FIELDS (ctx->record_type);
2559 while (*p)
2560 if (*p == f1 || *p == f2 || *p == f3)
2561 *p = DECL_CHAIN (*p);
2562 else
2563 p = &DECL_CHAIN (*p);
2564 DECL_CHAIN (f1) = f2;
2565 if (c3)
2567 DECL_CHAIN (f2) = f3;
2568 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2570 else
2571 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2572 TYPE_FIELDS (ctx->record_type) = f1;
2573 if (ctx->srecord_type)
2575 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2576 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2577 if (c3)
2578 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2579 p = &TYPE_FIELDS (ctx->srecord_type);
2580 while (*p)
2581 if (*p == f1 || *p == f2 || *p == f3)
2582 *p = DECL_CHAIN (*p);
2583 else
2584 p = &DECL_CHAIN (*p);
2585 DECL_CHAIN (f1) = f2;
2586 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2587 if (c3)
2589 DECL_CHAIN (f2) = f3;
2590 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2592 else
2593 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2594 TYPE_FIELDS (ctx->srecord_type) = f1;
2597 if (detach_clause)
2599 tree c, field;
2601 /* Look for a firstprivate clause with the detach event handle. */
2602 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2603 c; c = OMP_CLAUSE_CHAIN (c))
2605 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2606 continue;
2607 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2608 == OMP_CLAUSE_DECL (detach_clause))
2609 break;
2612 gcc_assert (c);
2613 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2615 /* Move field corresponding to the detach clause first.
2616 This is filled by GOMP_task and needs to be in a
2617 specific position. */
2618 p = &TYPE_FIELDS (ctx->record_type);
2619 while (*p)
2620 if (*p == field)
2621 *p = DECL_CHAIN (*p);
2622 else
2623 p = &DECL_CHAIN (*p);
2624 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2625 TYPE_FIELDS (ctx->record_type) = field;
2626 if (ctx->srecord_type)
2628 field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2629 p = &TYPE_FIELDS (ctx->srecord_type);
2630 while (*p)
2631 if (*p == field)
2632 *p = DECL_CHAIN (*p);
2633 else
2634 p = &DECL_CHAIN (*p);
2635 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2636 TYPE_FIELDS (ctx->srecord_type) = field;
2639 layout_type (ctx->record_type);
2640 fixup_child_record_type (ctx);
2641 if (ctx->srecord_type)
2642 layout_type (ctx->srecord_type);
2643 tree t = fold_convert_loc (loc, long_integer_type_node,
2644 TYPE_SIZE_UNIT (ctx->record_type));
2645 if (TREE_CODE (t) != INTEGER_CST)
2647 t = unshare_expr (t);
2648 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2650 gimple_omp_task_set_arg_size (ctx->stmt, t);
2651 t = build_int_cst (long_integer_type_node,
2652 TYPE_ALIGN_UNIT (ctx->record_type));
2653 gimple_omp_task_set_arg_align (ctx->stmt, t);
2657 /* Find the enclosing offload context. */
2659 static omp_context *
2660 enclosing_target_ctx (omp_context *ctx)
2662 for (; ctx; ctx = ctx->outer)
2663 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2664 break;
2666 return ctx;
2669 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2670 construct.
2671 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2673 static bool
2674 ctx_in_oacc_kernels_region (omp_context *ctx)
2676 for (;ctx != NULL; ctx = ctx->outer)
2678 gimple *stmt = ctx->stmt;
2679 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2680 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2681 return true;
2684 return false;
2687 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2688 (This doesn't include OpenACC 'kernels' decomposed parts.)
2689 Until kernels handling moves to use the same loop indirection
2690 scheme as parallel, we need to do this checking early. */
2692 static unsigned
2693 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2695 bool checking = true;
2696 unsigned outer_mask = 0;
2697 unsigned this_mask = 0;
2698 bool has_seq = false, has_auto = false;
2700 if (ctx->outer)
2701 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2702 if (!stmt)
2704 checking = false;
2705 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2706 return outer_mask;
2707 stmt = as_a <gomp_for *> (ctx->stmt);
2710 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2712 switch (OMP_CLAUSE_CODE (c))
2714 case OMP_CLAUSE_GANG:
2715 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2716 break;
2717 case OMP_CLAUSE_WORKER:
2718 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2719 break;
2720 case OMP_CLAUSE_VECTOR:
2721 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2722 break;
2723 case OMP_CLAUSE_SEQ:
2724 has_seq = true;
2725 break;
2726 case OMP_CLAUSE_AUTO:
2727 has_auto = true;
2728 break;
2729 default:
2730 break;
2734 if (checking)
2736 if (has_seq && (this_mask || has_auto))
2737 error_at (gimple_location (stmt), "%<seq%> overrides other"
2738 " OpenACC loop specifiers");
2739 else if (has_auto && this_mask)
2740 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2741 " OpenACC loop specifiers");
2743 if (this_mask & outer_mask)
2744 error_at (gimple_location (stmt), "inner loop uses same"
2745 " OpenACC parallelism as containing loop");
2748 return outer_mask | this_mask;
2751 /* Scan a GIMPLE_OMP_FOR. */
2753 static omp_context *
2754 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2756 omp_context *ctx;
2757 size_t i;
2758 tree clauses = gimple_omp_for_clauses (stmt);
2760 ctx = new_omp_context (stmt, outer_ctx);
2762 if (is_gimple_omp_oacc (stmt))
2764 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2766 if (!(tgt && is_oacc_kernels (tgt)))
2767 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2769 tree c_op0;
2770 switch (OMP_CLAUSE_CODE (c))
2772 case OMP_CLAUSE_GANG:
2773 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2774 break;
2776 case OMP_CLAUSE_WORKER:
2777 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2778 break;
2780 case OMP_CLAUSE_VECTOR:
2781 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2782 break;
2784 default:
2785 continue;
2788 if (c_op0)
2790 /* By construction, this is impossible for OpenACC 'kernels'
2791 decomposed parts. */
2792 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2794 error_at (OMP_CLAUSE_LOCATION (c),
2795 "argument not permitted on %qs clause",
2796 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2797 if (tgt)
2798 inform (gimple_location (tgt->stmt),
2799 "enclosing parent compute construct");
2800 else if (oacc_get_fn_attrib (current_function_decl))
2801 inform (DECL_SOURCE_LOCATION (current_function_decl),
2802 "enclosing routine");
2803 else
2804 gcc_unreachable ();
2808 if (tgt && is_oacc_kernels (tgt))
2809 check_oacc_kernel_gwv (stmt, ctx);
2811 /* Collect all variables named in reductions on this loop. Ensure
2812 that, if this loop has a reduction on some variable v, and there is
2813 a reduction on v somewhere in an outer context, then there is a
2814 reduction on v on all intervening loops as well. */
2815 tree local_reduction_clauses = NULL;
2816 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2818 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2819 local_reduction_clauses
2820 = tree_cons (NULL, c, local_reduction_clauses);
2822 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2823 ctx->outer_reduction_clauses
2824 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2825 ctx->outer->outer_reduction_clauses);
2826 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2827 tree local_iter = local_reduction_clauses;
2828 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2830 tree local_clause = TREE_VALUE (local_iter);
2831 tree local_var = OMP_CLAUSE_DECL (local_clause);
2832 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2833 bool have_outer_reduction = false;
2834 tree ctx_iter = outer_reduction_clauses;
2835 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2837 tree outer_clause = TREE_VALUE (ctx_iter);
2838 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2839 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2840 if (outer_var == local_var && outer_op != local_op)
2842 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2843 "conflicting reduction operations for %qE",
2844 local_var);
2845 inform (OMP_CLAUSE_LOCATION (outer_clause),
2846 "location of the previous reduction for %qE",
2847 outer_var);
2849 if (outer_var == local_var)
2851 have_outer_reduction = true;
2852 break;
2855 if (have_outer_reduction)
2857 /* There is a reduction on outer_var both on this loop and on
2858 some enclosing loop. Walk up the context tree until such a
2859 loop with a reduction on outer_var is found, and complain
2860 about all intervening loops that do not have such a
2861 reduction. */
2862 struct omp_context *curr_loop = ctx->outer;
2863 bool found = false;
2864 while (curr_loop != NULL)
2866 tree curr_iter = curr_loop->local_reduction_clauses;
2867 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2869 tree curr_clause = TREE_VALUE (curr_iter);
2870 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2871 if (curr_var == local_var)
2873 found = true;
2874 break;
2877 if (!found)
2878 warning_at (gimple_location (curr_loop->stmt), 0,
2879 "nested loop in reduction needs "
2880 "reduction clause for %qE",
2881 local_var);
2882 else
2883 break;
2884 curr_loop = curr_loop->outer;
2888 ctx->local_reduction_clauses = local_reduction_clauses;
2889 ctx->outer_reduction_clauses
2890 = chainon (unshare_expr (ctx->local_reduction_clauses),
2891 ctx->outer_reduction_clauses);
2893 if (tgt && is_oacc_kernels (tgt))
2895 /* Strip out reductions, as they are not handled yet. */
2896 tree *prev_ptr = &clauses;
2898 while (tree probe = *prev_ptr)
2900 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2902 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2903 *prev_ptr = *next_ptr;
2904 else
2905 prev_ptr = next_ptr;
2908 gimple_omp_for_set_clauses (stmt, clauses);
2912 scan_sharing_clauses (clauses, ctx);
2914 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2915 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2917 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2918 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2919 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2920 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2922 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2923 return ctx;
2926 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2928 static void
2929 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2930 omp_context *outer_ctx)
2932 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2933 gsi_replace (gsi, bind, false);
2934 gimple_seq seq = NULL;
2935 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2936 tree cond = create_tmp_var_raw (integer_type_node);
2937 DECL_CONTEXT (cond) = current_function_decl;
2938 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2939 gimple_bind_set_vars (bind, cond);
2940 gimple_call_set_lhs (g, cond);
2941 gimple_seq_add_stmt (&seq, g);
2942 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2943 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2944 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2945 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2946 gimple_seq_add_stmt (&seq, g);
2947 g = gimple_build_label (lab1);
2948 gimple_seq_add_stmt (&seq, g);
2949 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2950 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2951 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2952 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2953 gimple_omp_for_set_clauses (new_stmt, clause);
2954 gimple_seq_add_stmt (&seq, new_stmt);
2955 g = gimple_build_goto (lab3);
2956 gimple_seq_add_stmt (&seq, g);
2957 g = gimple_build_label (lab2);
2958 gimple_seq_add_stmt (&seq, g);
2959 gimple_seq_add_stmt (&seq, stmt);
2960 g = gimple_build_label (lab3);
2961 gimple_seq_add_stmt (&seq, g);
2962 gimple_bind_set_body (bind, seq);
2963 update_stmt (bind);
2964 scan_omp_for (new_stmt, outer_ctx);
2965 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2968 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2969 struct walk_stmt_info *);
2970 static omp_context *maybe_lookup_ctx (gimple *);
2972 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2973 for scan phase loop. */
2975 static void
2976 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2977 omp_context *outer_ctx)
2979 /* The only change between inclusive and exclusive scan will be
2980 within the first simd loop, so just use inclusive in the
2981 worksharing loop. */
2982 outer_ctx->scan_inclusive = true;
2983 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2984 OMP_CLAUSE_DECL (c) = integer_zero_node;
2986 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2987 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2988 gsi_replace (gsi, input_stmt, false);
2989 gimple_seq input_body = NULL;
2990 gimple_seq_add_stmt (&input_body, stmt);
2991 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2993 gimple_stmt_iterator input1_gsi = gsi_none ();
2994 struct walk_stmt_info wi;
2995 memset (&wi, 0, sizeof (wi));
2996 wi.val_only = true;
2997 wi.info = (void *) &input1_gsi;
2998 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2999 gcc_assert (!gsi_end_p (input1_gsi));
3001 gimple *input_stmt1 = gsi_stmt (input1_gsi);
3002 gsi_next (&input1_gsi);
3003 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
3004 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
3005 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
3006 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
3007 std::swap (input_stmt1, scan_stmt1);
3009 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
3010 gimple_omp_set_body (input_stmt1, NULL);
3012 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
3013 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
3015 gimple_omp_set_body (input_stmt1, input_body1);
3016 gimple_omp_set_body (scan_stmt1, NULL);
3018 gimple_stmt_iterator input2_gsi = gsi_none ();
3019 memset (&wi, 0, sizeof (wi));
3020 wi.val_only = true;
3021 wi.info = (void *) &input2_gsi;
3022 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
3023 NULL, &wi);
3024 gcc_assert (!gsi_end_p (input2_gsi));
3026 gimple *input_stmt2 = gsi_stmt (input2_gsi);
3027 gsi_next (&input2_gsi);
3028 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
3029 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
3030 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
3031 std::swap (input_stmt2, scan_stmt2);
3033 gimple_omp_set_body (input_stmt2, NULL);
3035 gimple_omp_set_body (input_stmt, input_body);
3036 gimple_omp_set_body (scan_stmt, scan_body);
3038 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
3039 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
3041 ctx = new_omp_context (scan_stmt, outer_ctx);
3042 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
3044 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
3047 /* Scan an OpenMP sections directive. */
3049 static void
3050 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
3052 omp_context *ctx;
3054 ctx = new_omp_context (stmt, outer_ctx);
3055 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
3056 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3059 /* Scan an OpenMP single directive. */
3061 static void
3062 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
3064 omp_context *ctx;
3065 tree name;
3067 ctx = new_omp_context (stmt, outer_ctx);
3068 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3069 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3070 name = create_tmp_var_name (".omp_copy_s");
3071 name = build_decl (gimple_location (stmt),
3072 TYPE_DECL, name, ctx->record_type);
3073 TYPE_NAME (ctx->record_type) = name;
3075 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
3076 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3078 if (TYPE_FIELDS (ctx->record_type) == NULL)
3079 ctx->record_type = NULL;
3080 else
3081 layout_type (ctx->record_type);
3084 /* Scan a GIMPLE_OMP_TARGET. */
3086 static void
3087 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
3089 omp_context *ctx;
3090 tree name;
3091 bool offloaded = is_gimple_omp_offloaded (stmt);
3092 tree clauses = gimple_omp_target_clauses (stmt);
3094 ctx = new_omp_context (stmt, outer_ctx);
3095 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3096 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3097 name = create_tmp_var_name (".omp_data_t");
3098 name = build_decl (gimple_location (stmt),
3099 TYPE_DECL, name, ctx->record_type);
3100 DECL_ARTIFICIAL (name) = 1;
3101 DECL_NAMELESS (name) = 1;
3102 TYPE_NAME (ctx->record_type) = name;
3103 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3105 if (offloaded)
3107 create_omp_child_function (ctx, false);
3108 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
3111 scan_sharing_clauses (clauses, ctx);
3112 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3114 if (TYPE_FIELDS (ctx->record_type) == NULL)
3115 ctx->record_type = ctx->receiver_decl = NULL;
3116 else
3118 TYPE_FIELDS (ctx->record_type)
3119 = nreverse (TYPE_FIELDS (ctx->record_type));
3120 if (flag_checking)
3122 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
3123 for (tree field = TYPE_FIELDS (ctx->record_type);
3124 field;
3125 field = DECL_CHAIN (field))
3126 gcc_assert (DECL_ALIGN (field) == align);
3128 layout_type (ctx->record_type);
3129 if (offloaded)
3130 fixup_child_record_type (ctx);
3133 if (ctx->teams_nested_p && ctx->nonteams_nested_p)
3135 error_at (gimple_location (stmt),
3136 "%<target%> construct with nested %<teams%> construct "
3137 "contains directives outside of the %<teams%> construct");
3138 gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
3142 /* Scan an OpenMP teams directive. */
3144 static void
3145 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
3147 omp_context *ctx = new_omp_context (stmt, outer_ctx);
3149 if (!gimple_omp_teams_host (stmt))
3151 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3152 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3153 return;
3155 taskreg_contexts.safe_push (ctx);
3156 gcc_assert (taskreg_nesting_level == 1);
3157 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3158 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3159 tree name = create_tmp_var_name (".omp_data_s");
3160 name = build_decl (gimple_location (stmt),
3161 TYPE_DECL, name, ctx->record_type);
3162 DECL_ARTIFICIAL (name) = 1;
3163 DECL_NAMELESS (name) = 1;
3164 TYPE_NAME (ctx->record_type) = name;
3165 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3166 create_omp_child_function (ctx, false);
3167 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
3169 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3170 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3172 if (TYPE_FIELDS (ctx->record_type) == NULL)
3173 ctx->record_type = ctx->receiver_decl = NULL;
3176 /* Check nesting restrictions. */
3177 static bool
3178 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
3180 tree c;
3182 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3183 inside an OpenACC CTX. */
3184 if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3185 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
3186 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3188 else if (!(is_gimple_omp (stmt)
3189 && is_gimple_omp_oacc (stmt)))
3191 if (oacc_get_fn_attrib (cfun->decl) != NULL)
3193 error_at (gimple_location (stmt),
3194 "non-OpenACC construct inside of OpenACC routine");
3195 return false;
3197 else
3198 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
3199 if (is_gimple_omp (octx->stmt)
3200 && is_gimple_omp_oacc (octx->stmt))
3202 error_at (gimple_location (stmt),
3203 "non-OpenACC construct inside of OpenACC region");
3204 return false;
3208 if (ctx != NULL)
3210 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
3211 && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
3213 c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
3214 OMP_CLAUSE_DEVICE);
3215 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3217 error_at (gimple_location (stmt),
3218 "OpenMP constructs are not allowed in target region "
3219 "with %<ancestor%>");
3220 return false;
3223 if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
3224 ctx->teams_nested_p = true;
3225 else
3226 ctx->nonteams_nested_p = true;
3228 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
3229 && ctx->outer
3230 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
3231 ctx = ctx->outer;
3232 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3233 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3234 && !ctx->loop_p)
3236 c = NULL_TREE;
3237 if (ctx->order_concurrent
3238 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
3239 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3240 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3242 error_at (gimple_location (stmt),
3243 "OpenMP constructs other than %<parallel%>, %<loop%>"
3244 " or %<simd%> may not be nested inside a region with"
3245 " the %<order(concurrent)%> clause");
3246 return false;
3248 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
3250 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3251 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3253 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
3254 && (ctx->outer == NULL
3255 || !gimple_omp_for_combined_into_p (ctx->stmt)
3256 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
3257 || (gimple_omp_for_kind (ctx->outer->stmt)
3258 != GF_OMP_FOR_KIND_FOR)
3259 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
3261 error_at (gimple_location (stmt),
3262 "%<ordered simd threads%> must be closely "
3263 "nested inside of %<%s simd%> region",
3264 lang_GNU_Fortran () ? "do" : "for");
3265 return false;
3267 return true;
3270 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3271 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
3272 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
3273 return true;
3274 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
3275 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3276 return true;
3277 error_at (gimple_location (stmt),
3278 "OpenMP constructs other than "
3279 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3280 "not be nested inside %<simd%> region");
3281 return false;
3283 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3285 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
3286 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
3287 && omp_find_clause (gimple_omp_for_clauses (stmt),
3288 OMP_CLAUSE_BIND) == NULL_TREE))
3289 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
3291 error_at (gimple_location (stmt),
3292 "only %<distribute%>, %<parallel%> or %<loop%> "
3293 "regions are allowed to be strictly nested inside "
3294 "%<teams%> region");
3295 return false;
3298 else if (ctx->order_concurrent
3299 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3300 && (gimple_code (stmt) != GIMPLE_OMP_FOR
3301 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3302 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3304 if (ctx->loop_p)
3305 error_at (gimple_location (stmt),
3306 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3307 "%<simd%> may not be nested inside a %<loop%> region");
3308 else
3309 error_at (gimple_location (stmt),
3310 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3311 "%<simd%> may not be nested inside a region with "
3312 "the %<order(concurrent)%> clause");
3313 return false;
3316 switch (gimple_code (stmt))
3318 case GIMPLE_OMP_FOR:
3319 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3320 return true;
3321 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3323 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3325 error_at (gimple_location (stmt),
3326 "%<distribute%> region must be strictly nested "
3327 "inside %<teams%> construct");
3328 return false;
3330 return true;
3332 /* We split taskloop into task and nested taskloop in it. */
3333 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3334 return true;
3335 /* For now, hope this will change and loop bind(parallel) will not
3336 be allowed in lots of contexts. */
3337 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3338 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3339 return true;
3340 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3342 bool ok = false;
3344 if (ctx)
3345 switch (gimple_code (ctx->stmt))
3347 case GIMPLE_OMP_FOR:
3348 ok = (gimple_omp_for_kind (ctx->stmt)
3349 == GF_OMP_FOR_KIND_OACC_LOOP);
3350 break;
3352 case GIMPLE_OMP_TARGET:
3353 switch (gimple_omp_target_kind (ctx->stmt))
3355 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3356 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3357 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3358 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3359 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3360 ok = true;
3361 break;
3363 default:
3364 break;
3367 default:
3368 break;
3370 else if (oacc_get_fn_attrib (current_function_decl))
3371 ok = true;
3372 if (!ok)
3374 error_at (gimple_location (stmt),
3375 "OpenACC loop directive must be associated with"
3376 " an OpenACC compute region");
3377 return false;
3380 /* FALLTHRU */
3381 case GIMPLE_CALL:
3382 if (is_gimple_call (stmt)
3383 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3384 == BUILT_IN_GOMP_CANCEL
3385 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3386 == BUILT_IN_GOMP_CANCELLATION_POINT))
3388 const char *bad = NULL;
3389 const char *kind = NULL;
3390 const char *construct
3391 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3392 == BUILT_IN_GOMP_CANCEL)
3393 ? "cancel"
3394 : "cancellation point";
3395 if (ctx == NULL)
3397 error_at (gimple_location (stmt), "orphaned %qs construct",
3398 construct);
3399 return false;
3401 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3402 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3403 : 0)
3405 case 1:
3406 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3407 bad = "parallel";
3408 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3409 == BUILT_IN_GOMP_CANCEL
3410 && !integer_zerop (gimple_call_arg (stmt, 1)))
3411 ctx->cancellable = true;
3412 kind = "parallel";
3413 break;
3414 case 2:
3415 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3416 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3417 bad = "for";
3418 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3419 == BUILT_IN_GOMP_CANCEL
3420 && !integer_zerop (gimple_call_arg (stmt, 1)))
3422 ctx->cancellable = true;
3423 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3424 OMP_CLAUSE_NOWAIT))
3425 warning_at (gimple_location (stmt), 0,
3426 "%<cancel for%> inside "
3427 "%<nowait%> for construct");
3428 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3429 OMP_CLAUSE_ORDERED))
3430 warning_at (gimple_location (stmt), 0,
3431 "%<cancel for%> inside "
3432 "%<ordered%> for construct");
3434 kind = "for";
3435 break;
3436 case 4:
3437 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3438 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3439 bad = "sections";
3440 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3441 == BUILT_IN_GOMP_CANCEL
3442 && !integer_zerop (gimple_call_arg (stmt, 1)))
3444 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3446 ctx->cancellable = true;
3447 if (omp_find_clause (gimple_omp_sections_clauses
3448 (ctx->stmt),
3449 OMP_CLAUSE_NOWAIT))
3450 warning_at (gimple_location (stmt), 0,
3451 "%<cancel sections%> inside "
3452 "%<nowait%> sections construct");
3454 else
3456 gcc_assert (ctx->outer
3457 && gimple_code (ctx->outer->stmt)
3458 == GIMPLE_OMP_SECTIONS);
3459 ctx->outer->cancellable = true;
3460 if (omp_find_clause (gimple_omp_sections_clauses
3461 (ctx->outer->stmt),
3462 OMP_CLAUSE_NOWAIT))
3463 warning_at (gimple_location (stmt), 0,
3464 "%<cancel sections%> inside "
3465 "%<nowait%> sections construct");
3468 kind = "sections";
3469 break;
3470 case 8:
3471 if (!is_task_ctx (ctx)
3472 && (!is_taskloop_ctx (ctx)
3473 || ctx->outer == NULL
3474 || !is_task_ctx (ctx->outer)))
3475 bad = "task";
3476 else
3478 for (omp_context *octx = ctx->outer;
3479 octx; octx = octx->outer)
3481 switch (gimple_code (octx->stmt))
3483 case GIMPLE_OMP_TASKGROUP:
3484 break;
3485 case GIMPLE_OMP_TARGET:
3486 if (gimple_omp_target_kind (octx->stmt)
3487 != GF_OMP_TARGET_KIND_REGION)
3488 continue;
3489 /* FALLTHRU */
3490 case GIMPLE_OMP_PARALLEL:
3491 case GIMPLE_OMP_TEAMS:
3492 error_at (gimple_location (stmt),
3493 "%<%s taskgroup%> construct not closely "
3494 "nested inside of %<taskgroup%> region",
3495 construct);
3496 return false;
3497 case GIMPLE_OMP_TASK:
3498 if (gimple_omp_task_taskloop_p (octx->stmt)
3499 && octx->outer
3500 && is_taskloop_ctx (octx->outer))
3502 tree clauses
3503 = gimple_omp_for_clauses (octx->outer->stmt);
3504 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3505 break;
3507 continue;
3508 default:
3509 continue;
3511 break;
3513 ctx->cancellable = true;
3515 kind = "taskgroup";
3516 break;
3517 default:
3518 error_at (gimple_location (stmt), "invalid arguments");
3519 return false;
3521 if (bad)
3523 error_at (gimple_location (stmt),
3524 "%<%s %s%> construct not closely nested inside of %qs",
3525 construct, kind, bad);
3526 return false;
3529 /* FALLTHRU */
3530 case GIMPLE_OMP_SECTIONS:
3531 case GIMPLE_OMP_SINGLE:
3532 for (; ctx != NULL; ctx = ctx->outer)
3533 switch (gimple_code (ctx->stmt))
3535 case GIMPLE_OMP_FOR:
3536 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3537 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3538 break;
3539 /* FALLTHRU */
3540 case GIMPLE_OMP_SECTIONS:
3541 case GIMPLE_OMP_SINGLE:
3542 case GIMPLE_OMP_ORDERED:
3543 case GIMPLE_OMP_MASTER:
3544 case GIMPLE_OMP_MASKED:
3545 case GIMPLE_OMP_TASK:
3546 case GIMPLE_OMP_CRITICAL:
3547 if (is_gimple_call (stmt))
3549 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3550 != BUILT_IN_GOMP_BARRIER)
3551 return true;
3552 error_at (gimple_location (stmt),
3553 "barrier region may not be closely nested inside "
3554 "of work-sharing, %<loop%>, %<critical%>, "
3555 "%<ordered%>, %<master%>, %<masked%>, explicit "
3556 "%<task%> or %<taskloop%> region");
3557 return false;
3559 error_at (gimple_location (stmt),
3560 "work-sharing region may not be closely nested inside "
3561 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3562 "%<master%>, %<masked%>, explicit %<task%> or "
3563 "%<taskloop%> region");
3564 return false;
3565 case GIMPLE_OMP_PARALLEL:
3566 case GIMPLE_OMP_TEAMS:
3567 return true;
3568 case GIMPLE_OMP_TARGET:
3569 if (gimple_omp_target_kind (ctx->stmt)
3570 == GF_OMP_TARGET_KIND_REGION)
3571 return true;
3572 break;
3573 default:
3574 break;
3576 break;
3577 case GIMPLE_OMP_MASTER:
3578 case GIMPLE_OMP_MASKED:
3579 for (; ctx != NULL; ctx = ctx->outer)
3580 switch (gimple_code (ctx->stmt))
3582 case GIMPLE_OMP_FOR:
3583 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3584 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3585 break;
3586 /* FALLTHRU */
3587 case GIMPLE_OMP_SECTIONS:
3588 case GIMPLE_OMP_SINGLE:
3589 case GIMPLE_OMP_TASK:
3590 error_at (gimple_location (stmt),
3591 "%qs region may not be closely nested inside "
3592 "of work-sharing, %<loop%>, explicit %<task%> or "
3593 "%<taskloop%> region",
3594 gimple_code (stmt) == GIMPLE_OMP_MASTER
3595 ? "master" : "masked");
3596 return false;
3597 case GIMPLE_OMP_PARALLEL:
3598 case GIMPLE_OMP_TEAMS:
3599 return true;
3600 case GIMPLE_OMP_TARGET:
3601 if (gimple_omp_target_kind (ctx->stmt)
3602 == GF_OMP_TARGET_KIND_REGION)
3603 return true;
3604 break;
3605 default:
3606 break;
3608 break;
3609 case GIMPLE_OMP_SCOPE:
3610 for (; ctx != NULL; ctx = ctx->outer)
3611 switch (gimple_code (ctx->stmt))
3613 case GIMPLE_OMP_FOR:
3614 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3615 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3616 break;
3617 /* FALLTHRU */
3618 case GIMPLE_OMP_SECTIONS:
3619 case GIMPLE_OMP_SINGLE:
3620 case GIMPLE_OMP_TASK:
3621 case GIMPLE_OMP_CRITICAL:
3622 case GIMPLE_OMP_ORDERED:
3623 case GIMPLE_OMP_MASTER:
3624 case GIMPLE_OMP_MASKED:
3625 error_at (gimple_location (stmt),
3626 "%<scope%> region may not be closely nested inside "
3627 "of work-sharing, %<loop%>, explicit %<task%>, "
3628 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3629 "or %<masked%> region");
3630 return false;
3631 case GIMPLE_OMP_PARALLEL:
3632 case GIMPLE_OMP_TEAMS:
3633 return true;
3634 case GIMPLE_OMP_TARGET:
3635 if (gimple_omp_target_kind (ctx->stmt)
3636 == GF_OMP_TARGET_KIND_REGION)
3637 return true;
3638 break;
3639 default:
3640 break;
3642 break;
3643 case GIMPLE_OMP_TASK:
3644 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3645 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
3647 enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
3648 error_at (OMP_CLAUSE_LOCATION (c),
3649 "%<%s(%s)%> is only allowed in %<omp ordered%>",
3650 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross",
3651 kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
3652 return false;
3654 break;
3655 case GIMPLE_OMP_ORDERED:
3656 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3657 c; c = OMP_CLAUSE_CHAIN (c))
3659 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS)
3661 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
3663 error_at (OMP_CLAUSE_LOCATION (c),
3664 "invalid depend kind in omp %<ordered%> %<depend%>");
3665 return false;
3667 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3668 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3669 continue;
3672 tree oclause;
3673 /* Look for containing ordered(N) loop. */
3674 if (ctx == NULL
3675 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3676 || (oclause
3677 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3678 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3680 error_at (OMP_CLAUSE_LOCATION (c),
3681 "%<ordered%> construct with %<depend%> clause "
3682 "must be closely nested inside an %<ordered%> loop");
3683 return false;
3686 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3687 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3689 /* ordered simd must be closely nested inside of simd region,
3690 and simd region must not encounter constructs other than
3691 ordered simd, therefore ordered simd may be either orphaned,
3692 or ctx->stmt must be simd. The latter case is handled already
3693 earlier. */
3694 if (ctx != NULL)
3696 error_at (gimple_location (stmt),
3697 "%<ordered%> %<simd%> must be closely nested inside "
3698 "%<simd%> region");
3699 return false;
3702 for (; ctx != NULL; ctx = ctx->outer)
3703 switch (gimple_code (ctx->stmt))
3705 case GIMPLE_OMP_CRITICAL:
3706 case GIMPLE_OMP_TASK:
3707 case GIMPLE_OMP_ORDERED:
3708 ordered_in_taskloop:
3709 error_at (gimple_location (stmt),
3710 "%<ordered%> region may not be closely nested inside "
3711 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3712 "%<taskloop%> region");
3713 return false;
3714 case GIMPLE_OMP_FOR:
3715 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3716 goto ordered_in_taskloop;
3717 tree o;
3718 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3719 OMP_CLAUSE_ORDERED);
3720 if (o == NULL)
3722 error_at (gimple_location (stmt),
3723 "%<ordered%> region must be closely nested inside "
3724 "a loop region with an %<ordered%> clause");
3725 return false;
3727 if (!gimple_omp_ordered_standalone_p (stmt))
3729 if (OMP_CLAUSE_ORDERED_DOACROSS (o))
3731 error_at (gimple_location (stmt),
3732 "%<ordered%> construct without %<doacross%> or "
3733 "%<depend%> clauses must not have the same "
3734 "binding region as %<ordered%> construct with "
3735 "those clauses");
3736 return false;
3738 else if (OMP_CLAUSE_ORDERED_EXPR (o))
3740 tree co
3741 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3742 OMP_CLAUSE_COLLAPSE);
3743 HOST_WIDE_INT
3744 o_n = tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (o));
3745 HOST_WIDE_INT c_n = 1;
3746 if (co)
3747 c_n = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (co));
3748 if (o_n != c_n)
3750 error_at (gimple_location (stmt),
3751 "%<ordered%> construct without %<doacross%> "
3752 "or %<depend%> clauses binds to loop where "
3753 "%<collapse%> argument %wd is different from "
3754 "%<ordered%> argument %wd", c_n, o_n);
3755 return false;
3759 return true;
3760 case GIMPLE_OMP_TARGET:
3761 if (gimple_omp_target_kind (ctx->stmt)
3762 != GF_OMP_TARGET_KIND_REGION)
3763 break;
3764 /* FALLTHRU */
3765 case GIMPLE_OMP_PARALLEL:
3766 case GIMPLE_OMP_TEAMS:
3767 error_at (gimple_location (stmt),
3768 "%<ordered%> region must be closely nested inside "
3769 "a loop region with an %<ordered%> clause");
3770 return false;
3771 default:
3772 break;
3774 break;
3775 case GIMPLE_OMP_CRITICAL:
3777 tree this_stmt_name
3778 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3779 for (; ctx != NULL; ctx = ctx->outer)
3780 if (gomp_critical *other_crit
3781 = dyn_cast <gomp_critical *> (ctx->stmt))
3782 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3784 error_at (gimple_location (stmt),
3785 "%<critical%> region may not be nested inside "
3786 "a %<critical%> region with the same name");
3787 return false;
3790 break;
3791 case GIMPLE_OMP_TEAMS:
3792 if (ctx == NULL)
3793 break;
3794 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3795 || (gimple_omp_target_kind (ctx->stmt)
3796 != GF_OMP_TARGET_KIND_REGION))
3798 /* Teams construct can appear either strictly nested inside of
3799 target construct with no intervening stmts, or can be encountered
3800 only by initial task (so must not appear inside any OpenMP
3801 construct. */
3802 error_at (gimple_location (stmt),
3803 "%<teams%> construct must be closely nested inside of "
3804 "%<target%> construct or not nested in any OpenMP "
3805 "construct");
3806 return false;
3808 break;
3809 case GIMPLE_OMP_TARGET:
3810 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3811 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
3813 enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
3814 error_at (OMP_CLAUSE_LOCATION (c),
3815 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3816 kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
3817 return false;
3819 if (is_gimple_omp_offloaded (stmt)
3820 && oacc_get_fn_attrib (cfun->decl) != NULL)
3822 error_at (gimple_location (stmt),
3823 "OpenACC region inside of OpenACC routine, nested "
3824 "parallelism not supported yet");
3825 return false;
3827 for (; ctx != NULL; ctx = ctx->outer)
3829 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3831 if (is_gimple_omp (stmt)
3832 && is_gimple_omp_oacc (stmt)
3833 && is_gimple_omp (ctx->stmt))
3835 error_at (gimple_location (stmt),
3836 "OpenACC construct inside of non-OpenACC region");
3837 return false;
3839 continue;
3842 const char *stmt_name, *ctx_stmt_name;
3843 switch (gimple_omp_target_kind (stmt))
3845 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3846 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3847 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3848 case GF_OMP_TARGET_KIND_ENTER_DATA:
3849 stmt_name = "target enter data"; break;
3850 case GF_OMP_TARGET_KIND_EXIT_DATA:
3851 stmt_name = "target exit data"; break;
3852 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3853 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3854 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3855 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3856 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3857 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
3858 stmt_name = "enter data"; break;
3859 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
3860 stmt_name = "exit data"; break;
3861 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3862 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3863 break;
3864 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3865 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3866 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3867 /* OpenACC 'kernels' decomposed parts. */
3868 stmt_name = "kernels"; break;
3869 default: gcc_unreachable ();
3871 switch (gimple_omp_target_kind (ctx->stmt))
3873 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3874 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3875 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3876 ctx_stmt_name = "parallel"; break;
3877 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3878 ctx_stmt_name = "kernels"; break;
3879 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3880 ctx_stmt_name = "serial"; break;
3881 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3882 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3883 ctx_stmt_name = "host_data"; break;
3884 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3885 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3886 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3887 /* OpenACC 'kernels' decomposed parts. */
3888 ctx_stmt_name = "kernels"; break;
3889 default: gcc_unreachable ();
3892 /* OpenACC/OpenMP mismatch? */
3893 if (is_gimple_omp_oacc (stmt)
3894 != is_gimple_omp_oacc (ctx->stmt))
3896 error_at (gimple_location (stmt),
3897 "%s %qs construct inside of %s %qs region",
3898 (is_gimple_omp_oacc (stmt)
3899 ? "OpenACC" : "OpenMP"), stmt_name,
3900 (is_gimple_omp_oacc (ctx->stmt)
3901 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3902 return false;
3904 if (is_gimple_omp_offloaded (ctx->stmt))
3906 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3907 if (is_gimple_omp_oacc (ctx->stmt))
3909 error_at (gimple_location (stmt),
3910 "%qs construct inside of %qs region",
3911 stmt_name, ctx_stmt_name);
3912 return false;
3914 else
3916 if ((gimple_omp_target_kind (ctx->stmt)
3917 == GF_OMP_TARGET_KIND_REGION)
3918 && (gimple_omp_target_kind (stmt)
3919 == GF_OMP_TARGET_KIND_REGION))
3921 c = omp_find_clause (gimple_omp_target_clauses (stmt),
3922 OMP_CLAUSE_DEVICE);
3923 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3924 break;
3926 warning_at (gimple_location (stmt), 0,
3927 "%qs construct inside of %qs region",
3928 stmt_name, ctx_stmt_name);
3932 break;
3933 default:
3934 break;
3936 return true;
3940 /* Helper function scan_omp.
3942 Callback for walk_tree or operators in walk_gimple_stmt used to
3943 scan for OMP directives in TP. */
3945 static tree
3946 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3948 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3949 omp_context *ctx = (omp_context *) wi->info;
3950 tree t = *tp;
3952 switch (TREE_CODE (t))
3954 case VAR_DECL:
3955 case PARM_DECL:
3956 case LABEL_DECL:
3957 case RESULT_DECL:
3958 if (ctx)
3960 tree repl = remap_decl (t, &ctx->cb);
3961 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3962 *tp = repl;
3964 break;
3966 default:
3967 if (ctx && TYPE_P (t))
3968 *tp = remap_type (t, &ctx->cb);
3969 else if (!DECL_P (t))
3971 *walk_subtrees = 1;
3972 if (ctx)
3974 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3975 if (tem != TREE_TYPE (t))
3977 if (TREE_CODE (t) == INTEGER_CST)
3978 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3979 else
3980 TREE_TYPE (t) = tem;
3984 break;
3987 return NULL_TREE;
3990 /* Return true if FNDECL is a setjmp or a longjmp. */
3992 static bool
3993 setjmp_or_longjmp_p (const_tree fndecl)
3995 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3996 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3997 return true;
3999 tree declname = DECL_NAME (fndecl);
4000 if (!declname
4001 || (DECL_CONTEXT (fndecl) != NULL_TREE
4002 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
4003 || !TREE_PUBLIC (fndecl))
4004 return false;
4006 const char *name = IDENTIFIER_POINTER (declname);
4007 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
4010 /* Return true if FNDECL is an omp_* runtime API call. */
4012 static bool
4013 omp_runtime_api_call (const_tree fndecl)
4015 tree declname = DECL_NAME (fndecl);
4016 if (!declname
4017 || (DECL_CONTEXT (fndecl) != NULL_TREE
4018 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
4019 || !TREE_PUBLIC (fndecl))
4020 return false;
4022 const char *name = IDENTIFIER_POINTER (declname);
4023 if (!startswith (name, "omp_"))
4024 return false;
4026 static const char *omp_runtime_apis[] =
4028 /* This array has 3 sections. First omp_* calls that don't
4029 have any suffixes. */
4030 "aligned_alloc",
4031 "aligned_calloc",
4032 "alloc",
4033 "calloc",
4034 "free",
4035 "get_mapped_ptr",
4036 "realloc",
4037 "target_alloc",
4038 "target_associate_ptr",
4039 "target_disassociate_ptr",
4040 "target_free",
4041 "target_is_accessible",
4042 "target_is_present",
4043 "target_memcpy",
4044 "target_memcpy_async",
4045 "target_memcpy_rect",
4046 "target_memcpy_rect_async",
4047 NULL,
4048 /* Now omp_* calls that are available as omp_* and omp_*_; however, the
4049 DECL_NAME is always omp_* without tailing underscore. */
4050 "capture_affinity",
4051 "destroy_allocator",
4052 "destroy_lock",
4053 "destroy_nest_lock",
4054 "display_affinity",
4055 "fulfill_event",
4056 "get_active_level",
4057 "get_affinity_format",
4058 "get_cancellation",
4059 "get_default_allocator",
4060 "get_default_device",
4061 "get_device_num",
4062 "get_dynamic",
4063 "get_initial_device",
4064 "get_level",
4065 "get_max_active_levels",
4066 "get_max_task_priority",
4067 "get_max_teams",
4068 "get_max_threads",
4069 "get_nested",
4070 "get_num_devices",
4071 "get_num_places",
4072 "get_num_procs",
4073 "get_num_teams",
4074 "get_num_threads",
4075 "get_partition_num_places",
4076 "get_place_num",
4077 "get_proc_bind",
4078 "get_supported_active_levels",
4079 "get_team_num",
4080 "get_teams_thread_limit",
4081 "get_thread_limit",
4082 "get_thread_num",
4083 "get_wtick",
4084 "get_wtime",
4085 "in_explicit_task",
4086 "in_final",
4087 "in_parallel",
4088 "init_lock",
4089 "init_nest_lock",
4090 "is_initial_device",
4091 "pause_resource",
4092 "pause_resource_all",
4093 "set_affinity_format",
4094 "set_default_allocator",
4095 "set_lock",
4096 "set_nest_lock",
4097 "test_lock",
4098 "test_nest_lock",
4099 "unset_lock",
4100 "unset_nest_lock",
4101 NULL,
4102 /* And finally calls available as omp_*, omp_*_ and omp_*_8_; however,
4103 as DECL_NAME only omp_* and omp_*_8 appear. */
4104 "display_env",
4105 "get_ancestor_thread_num",
4106 "init_allocator",
4107 "get_partition_place_nums",
4108 "get_place_num_procs",
4109 "get_place_proc_ids",
4110 "get_schedule",
4111 "get_team_size",
4112 "set_default_device",
4113 "set_dynamic",
4114 "set_max_active_levels",
4115 "set_nested",
4116 "set_num_teams",
4117 "set_num_threads",
4118 "set_schedule",
4119 "set_teams_thread_limit"
4122 int mode = 0;
4123 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
4125 if (omp_runtime_apis[i] == NULL)
4127 mode++;
4128 continue;
4130 size_t len = strlen (omp_runtime_apis[i]);
4131 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
4132 && (name[4 + len] == '\0'
4133 || (mode > 1 && strcmp (name + 4 + len, "_8") == 0)))
4134 return true;
4136 return false;
4139 /* Helper function for scan_omp.
4141 Callback for walk_gimple_stmt used to scan for OMP directives in
4142 the current statement in GSI. */
4144 static tree
4145 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
4146 struct walk_stmt_info *wi)
4148 gimple *stmt = gsi_stmt (*gsi);
4149 omp_context *ctx = (omp_context *) wi->info;
4151 if (gimple_has_location (stmt))
4152 input_location = gimple_location (stmt);
4154 /* Check the nesting restrictions. */
4155 bool remove = false;
4156 if (is_gimple_omp (stmt))
4157 remove = !check_omp_nesting_restrictions (stmt, ctx);
4158 else if (is_gimple_call (stmt))
4160 tree fndecl = gimple_call_fndecl (stmt);
4161 if (fndecl)
4163 if (ctx
4164 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4165 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
4166 && setjmp_or_longjmp_p (fndecl)
4167 && !ctx->loop_p)
4169 remove = true;
4170 error_at (gimple_location (stmt),
4171 "setjmp/longjmp inside %<simd%> construct");
4173 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4174 switch (DECL_FUNCTION_CODE (fndecl))
4176 case BUILT_IN_GOMP_BARRIER:
4177 case BUILT_IN_GOMP_CANCEL:
4178 case BUILT_IN_GOMP_CANCELLATION_POINT:
4179 case BUILT_IN_GOMP_TASKYIELD:
4180 case BUILT_IN_GOMP_TASKWAIT:
4181 case BUILT_IN_GOMP_TASKGROUP_START:
4182 case BUILT_IN_GOMP_TASKGROUP_END:
4183 remove = !check_omp_nesting_restrictions (stmt, ctx);
4184 break;
4185 default:
4186 break;
4188 else if (ctx)
4190 omp_context *octx = ctx;
4191 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
4192 octx = ctx->outer;
4193 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
4195 remove = true;
4196 error_at (gimple_location (stmt),
4197 "OpenMP runtime API call %qD in a region with "
4198 "%<order(concurrent)%> clause", fndecl);
4200 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4201 && omp_runtime_api_call (fndecl)
4202 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4203 != strlen ("omp_get_num_teams"))
4204 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4205 "omp_get_num_teams") != 0)
4206 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4207 != strlen ("omp_get_team_num"))
4208 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4209 "omp_get_team_num") != 0))
4211 remove = true;
4212 error_at (gimple_location (stmt),
4213 "OpenMP runtime API call %qD strictly nested in a "
4214 "%<teams%> region", fndecl);
4216 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
4217 && (gimple_omp_target_kind (ctx->stmt)
4218 == GF_OMP_TARGET_KIND_REGION)
4219 && omp_runtime_api_call (fndecl))
4221 tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
4222 tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
4223 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
4224 error_at (gimple_location (stmt),
4225 "OpenMP runtime API call %qD in a region with "
4226 "%<device(ancestor)%> clause", fndecl);
4231 if (remove)
4233 stmt = gimple_build_nop ();
4234 gsi_replace (gsi, stmt, false);
4237 *handled_ops_p = true;
4239 switch (gimple_code (stmt))
4241 case GIMPLE_OMP_PARALLEL:
4242 taskreg_nesting_level++;
4243 scan_omp_parallel (gsi, ctx);
4244 taskreg_nesting_level--;
4245 break;
4247 case GIMPLE_OMP_TASK:
4248 taskreg_nesting_level++;
4249 scan_omp_task (gsi, ctx);
4250 taskreg_nesting_level--;
4251 break;
4253 case GIMPLE_OMP_FOR:
4254 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4255 == GF_OMP_FOR_KIND_SIMD)
4256 && gimple_omp_for_combined_into_p (stmt)
4257 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
4259 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
4260 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
4261 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
4263 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
4264 break;
4267 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4268 == GF_OMP_FOR_KIND_SIMD)
4269 && omp_maybe_offloaded_ctx (ctx)
4270 && omp_max_simt_vf ()
4271 && gimple_omp_for_collapse (stmt) == 1)
4272 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
4273 else
4274 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
4275 break;
4277 case GIMPLE_OMP_SCOPE:
4278 ctx = new_omp_context (stmt, ctx);
4279 scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
4280 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4281 break;
4283 case GIMPLE_OMP_SECTIONS:
4284 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
4285 break;
4287 case GIMPLE_OMP_SINGLE:
4288 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
4289 break;
4291 case GIMPLE_OMP_SCAN:
4292 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
4294 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
4295 ctx->scan_inclusive = true;
4296 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
4297 ctx->scan_exclusive = true;
4299 /* FALLTHRU */
4300 case GIMPLE_OMP_SECTION:
4301 case GIMPLE_OMP_MASTER:
4302 case GIMPLE_OMP_ORDERED:
4303 case GIMPLE_OMP_CRITICAL:
4304 ctx = new_omp_context (stmt, ctx);
4305 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4306 break;
4308 case GIMPLE_OMP_MASKED:
4309 ctx = new_omp_context (stmt, ctx);
4310 scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
4311 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4312 break;
4314 case GIMPLE_OMP_TASKGROUP:
4315 ctx = new_omp_context (stmt, ctx);
4316 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
4317 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4318 break;
4320 case GIMPLE_OMP_TARGET:
4321 if (is_gimple_omp_offloaded (stmt))
4323 taskreg_nesting_level++;
4324 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4325 taskreg_nesting_level--;
4327 else
4328 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4329 break;
4331 case GIMPLE_OMP_TEAMS:
4332 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4334 taskreg_nesting_level++;
4335 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4336 taskreg_nesting_level--;
4338 else
4339 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4340 break;
4342 case GIMPLE_BIND:
4344 tree var;
4346 *handled_ops_p = false;
4347 if (ctx)
4348 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4349 var ;
4350 var = DECL_CHAIN (var))
4351 insert_decl_map (&ctx->cb, var, var);
4353 break;
4354 default:
4355 *handled_ops_p = false;
4356 break;
4359 return NULL_TREE;
4363 /* Scan all the statements starting at the current statement. CTX
4364 contains context information about the OMP directives and
4365 clauses found during the scan. */
4367 static void
4368 scan_omp (gimple_seq *body_p, omp_context *ctx)
4370 location_t saved_location;
4371 struct walk_stmt_info wi;
4373 memset (&wi, 0, sizeof (wi));
4374 wi.info = ctx;
4375 wi.want_locations = true;
4377 saved_location = input_location;
4378 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4379 input_location = saved_location;
4382 /* Re-gimplification and code generation routines. */
4384 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4385 of BIND if in a method. */
4387 static void
4388 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4390 if (DECL_ARGUMENTS (current_function_decl)
4391 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4392 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4393 == POINTER_TYPE))
4395 tree vars = gimple_bind_vars (bind);
4396 for (tree *pvar = &vars; *pvar; )
4397 if (omp_member_access_dummy_var (*pvar))
4398 *pvar = DECL_CHAIN (*pvar);
4399 else
4400 pvar = &DECL_CHAIN (*pvar);
4401 gimple_bind_set_vars (bind, vars);
4405 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4406 block and its subblocks. */
4408 static void
4409 remove_member_access_dummy_vars (tree block)
4411 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4412 if (omp_member_access_dummy_var (*pvar))
4413 *pvar = DECL_CHAIN (*pvar);
4414 else
4415 pvar = &DECL_CHAIN (*pvar);
4417 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4418 remove_member_access_dummy_vars (block);
4421 /* If a context was created for STMT when it was scanned, return it. */
4423 static omp_context *
4424 maybe_lookup_ctx (gimple *stmt)
4426 splay_tree_node n;
4427 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4428 return n ? (omp_context *) n->value : NULL;
4432 /* Find the mapping for DECL in CTX or the immediately enclosing
4433 context that has a mapping for DECL.
4435 If CTX is a nested parallel directive, we may have to use the decl
4436 mappings created in CTX's parent context. Suppose that we have the
4437 following parallel nesting (variable UIDs showed for clarity):
4439 iD.1562 = 0;
4440 #omp parallel shared(iD.1562) -> outer parallel
4441 iD.1562 = iD.1562 + 1;
4443 #omp parallel shared (iD.1562) -> inner parallel
4444 iD.1562 = iD.1562 - 1;
4446 Each parallel structure will create a distinct .omp_data_s structure
4447 for copying iD.1562 in/out of the directive:
4449 outer parallel .omp_data_s.1.i -> iD.1562
4450 inner parallel .omp_data_s.2.i -> iD.1562
4452 A shared variable mapping will produce a copy-out operation before
4453 the parallel directive and a copy-in operation after it. So, in
4454 this case we would have:
4456 iD.1562 = 0;
4457 .omp_data_o.1.i = iD.1562;
4458 #omp parallel shared(iD.1562) -> outer parallel
4459 .omp_data_i.1 = &.omp_data_o.1
4460 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4462 .omp_data_o.2.i = iD.1562; -> **
4463 #omp parallel shared(iD.1562) -> inner parallel
4464 .omp_data_i.2 = &.omp_data_o.2
4465 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4468 ** This is a problem. The symbol iD.1562 cannot be referenced
4469 inside the body of the outer parallel region. But since we are
4470 emitting this copy operation while expanding the inner parallel
4471 directive, we need to access the CTX structure of the outer
4472 parallel directive to get the correct mapping:
4474 .omp_data_o.2.i = .omp_data_i.1->i
4476 Since there may be other workshare or parallel directives enclosing
4477 the parallel directive, it may be necessary to walk up the context
4478 parent chain. This is not a problem in general because nested
4479 parallelism happens only rarely. */
4481 static tree
4482 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4484 tree t;
4485 omp_context *up;
4487 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4488 t = maybe_lookup_decl (decl, up);
4490 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4492 return t ? t : decl;
4496 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4497 in outer contexts. */
4499 static tree
4500 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4502 tree t = NULL;
4503 omp_context *up;
4505 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4506 t = maybe_lookup_decl (decl, up);
4508 return t ? t : decl;
4512 /* Construct the initialization value for reduction operation OP. */
4514 tree
4515 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4517 switch (op)
4519 case PLUS_EXPR:
4520 case MINUS_EXPR:
4521 case BIT_IOR_EXPR:
4522 case BIT_XOR_EXPR:
4523 case TRUTH_OR_EXPR:
4524 case TRUTH_ORIF_EXPR:
4525 case TRUTH_XOR_EXPR:
4526 case NE_EXPR:
4527 return build_zero_cst (type);
4529 case MULT_EXPR:
4530 case TRUTH_AND_EXPR:
4531 case TRUTH_ANDIF_EXPR:
4532 case EQ_EXPR:
4533 return fold_convert_loc (loc, type, integer_one_node);
4535 case BIT_AND_EXPR:
4536 return fold_convert_loc (loc, type, integer_minus_one_node);
4538 case MAX_EXPR:
4539 if (SCALAR_FLOAT_TYPE_P (type))
4541 REAL_VALUE_TYPE min;
4542 if (HONOR_INFINITIES (type))
4543 real_arithmetic (&min, NEGATE_EXPR, &dconstinf, NULL);
4544 else
4545 real_maxval (&min, 1, TYPE_MODE (type));
4546 return build_real (type, min);
4548 else if (POINTER_TYPE_P (type))
4550 wide_int min
4551 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4552 return wide_int_to_tree (type, min);
4554 else
4556 gcc_assert (INTEGRAL_TYPE_P (type));
4557 return TYPE_MIN_VALUE (type);
4560 case MIN_EXPR:
4561 if (SCALAR_FLOAT_TYPE_P (type))
4563 REAL_VALUE_TYPE max;
4564 if (HONOR_INFINITIES (type))
4565 max = dconstinf;
4566 else
4567 real_maxval (&max, 0, TYPE_MODE (type));
4568 return build_real (type, max);
4570 else if (POINTER_TYPE_P (type))
4572 wide_int max
4573 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4574 return wide_int_to_tree (type, max);
4576 else
4578 gcc_assert (INTEGRAL_TYPE_P (type));
4579 return TYPE_MAX_VALUE (type);
4582 default:
4583 gcc_unreachable ();
4587 /* Construct the initialization value for reduction CLAUSE. */
4589 tree
4590 omp_reduction_init (tree clause, tree type)
4592 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4593 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4596 /* Return alignment to be assumed for var in CLAUSE, which should be
4597 OMP_CLAUSE_ALIGNED. */
4599 static tree
4600 omp_clause_aligned_alignment (tree clause)
4602 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4603 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4605 /* Otherwise return implementation defined alignment. */
4606 unsigned int al = 1;
4607 opt_scalar_mode mode_iter;
4608 auto_vector_modes modes;
4609 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4610 static enum mode_class classes[]
4611 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4612 for (int i = 0; i < 4; i += 2)
4613 /* The for loop above dictates that we only walk through scalar classes. */
4614 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4616 scalar_mode mode = mode_iter.require ();
4617 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4618 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4619 continue;
4620 machine_mode alt_vmode;
4621 for (unsigned int j = 0; j < modes.length (); ++j)
4622 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4623 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4624 vmode = alt_vmode;
4626 tree type = lang_hooks.types.type_for_mode (mode, 1);
4627 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4628 continue;
4629 type = build_vector_type_for_mode (type, vmode);
4630 if (TYPE_MODE (type) != vmode)
4631 continue;
4632 if (TYPE_ALIGN_UNIT (type) > al)
4633 al = TYPE_ALIGN_UNIT (type);
4635 return build_int_cst (integer_type_node, al);
4639 /* This structure is part of the interface between lower_rec_simd_input_clauses
4640 and lower_rec_input_clauses. */
4642 class omplow_simd_context {
4643 public:
4644 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4645 tree idx;
4646 tree lane;
4647 tree lastlane;
4648 vec<tree, va_heap> simt_eargs;
4649 gimple_seq simt_dlist;
4650 poly_uint64_pod max_vf;
4651 bool is_simt;
4654 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4655 privatization. */
4657 static bool
4658 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4659 omplow_simd_context *sctx, tree &ivar,
4660 tree &lvar, tree *rvar = NULL,
4661 tree *rvar2 = NULL)
4663 if (known_eq (sctx->max_vf, 0U))
4665 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4666 if (maybe_gt (sctx->max_vf, 1U))
4668 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4669 OMP_CLAUSE_SAFELEN);
4670 if (c)
4672 poly_uint64 safe_len;
4673 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4674 || maybe_lt (safe_len, 1U))
4675 sctx->max_vf = 1;
4676 else
4677 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4680 if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
4682 for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
4683 c = OMP_CLAUSE_CHAIN (c))
4685 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4686 continue;
4688 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4690 /* UDR reductions are not supported yet for SIMT, disable
4691 SIMT. */
4692 sctx->max_vf = 1;
4693 break;
4696 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
4697 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
4699 /* Doing boolean operations on non-integral types is
4700 for conformance only, it's not worth supporting this
4701 for SIMT. */
4702 sctx->max_vf = 1;
4703 break;
4707 if (maybe_gt (sctx->max_vf, 1U))
4709 sctx->idx = create_tmp_var (unsigned_type_node);
4710 sctx->lane = create_tmp_var (unsigned_type_node);
4713 if (known_eq (sctx->max_vf, 1U))
4714 return false;
4716 if (sctx->is_simt)
4718 if (is_gimple_reg (new_var))
4720 ivar = lvar = new_var;
4721 return true;
4723 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4724 ivar = lvar = create_tmp_var (type);
4725 TREE_ADDRESSABLE (ivar) = 1;
4726 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4727 NULL, DECL_ATTRIBUTES (ivar));
4728 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4729 tree clobber = build_clobber (type);
4730 gimple *g = gimple_build_assign (ivar, clobber);
4731 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4733 else
4735 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4736 tree avar = create_tmp_var_raw (atype);
4737 if (TREE_ADDRESSABLE (new_var))
4738 TREE_ADDRESSABLE (avar) = 1;
4739 DECL_ATTRIBUTES (avar)
4740 = tree_cons (get_identifier ("omp simd array"), NULL,
4741 DECL_ATTRIBUTES (avar));
4742 gimple_add_tmp_var (avar);
4743 tree iavar = avar;
4744 if (rvar && !ctx->for_simd_scan_phase)
4746 /* For inscan reductions, create another array temporary,
4747 which will hold the reduced value. */
4748 iavar = create_tmp_var_raw (atype);
4749 if (TREE_ADDRESSABLE (new_var))
4750 TREE_ADDRESSABLE (iavar) = 1;
4751 DECL_ATTRIBUTES (iavar)
4752 = tree_cons (get_identifier ("omp simd array"), NULL,
4753 tree_cons (get_identifier ("omp simd inscan"), NULL,
4754 DECL_ATTRIBUTES (iavar)));
4755 gimple_add_tmp_var (iavar);
4756 ctx->cb.decl_map->put (avar, iavar);
4757 if (sctx->lastlane == NULL_TREE)
4758 sctx->lastlane = create_tmp_var (unsigned_type_node);
4759 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4760 sctx->lastlane, NULL_TREE, NULL_TREE);
4761 TREE_THIS_NOTRAP (*rvar) = 1;
4763 if (ctx->scan_exclusive)
4765 /* And for exclusive scan yet another one, which will
4766 hold the value during the scan phase. */
4767 tree savar = create_tmp_var_raw (atype);
4768 if (TREE_ADDRESSABLE (new_var))
4769 TREE_ADDRESSABLE (savar) = 1;
4770 DECL_ATTRIBUTES (savar)
4771 = tree_cons (get_identifier ("omp simd array"), NULL,
4772 tree_cons (get_identifier ("omp simd inscan "
4773 "exclusive"), NULL,
4774 DECL_ATTRIBUTES (savar)));
4775 gimple_add_tmp_var (savar);
4776 ctx->cb.decl_map->put (iavar, savar);
4777 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4778 sctx->idx, NULL_TREE, NULL_TREE);
4779 TREE_THIS_NOTRAP (*rvar2) = 1;
4782 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4783 NULL_TREE, NULL_TREE);
4784 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4785 NULL_TREE, NULL_TREE);
4786 TREE_THIS_NOTRAP (ivar) = 1;
4787 TREE_THIS_NOTRAP (lvar) = 1;
4789 if (DECL_P (new_var))
4791 SET_DECL_VALUE_EXPR (new_var, lvar);
4792 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4794 return true;
4797 /* Helper function of lower_rec_input_clauses. For a reference
4798 in simd reduction, add an underlying variable it will reference. */
4800 static void
4801 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4803 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4804 if (TREE_CONSTANT (z))
4806 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4807 get_name (new_vard));
4808 gimple_add_tmp_var (z);
4809 TREE_ADDRESSABLE (z) = 1;
4810 z = build_fold_addr_expr_loc (loc, z);
4811 gimplify_assign (new_vard, z, ilist);
4815 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4816 code to emit (type) (tskred_temp[idx]). */
4818 static tree
4819 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4820 unsigned idx)
4822 unsigned HOST_WIDE_INT sz
4823 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4824 tree r = build2 (MEM_REF, pointer_sized_int_node,
4825 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4826 idx * sz));
4827 tree v = create_tmp_var (pointer_sized_int_node);
4828 gimple *g = gimple_build_assign (v, r);
4829 gimple_seq_add_stmt (ilist, g);
4830 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4832 v = create_tmp_var (type);
4833 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4834 gimple_seq_add_stmt (ilist, g);
4836 return v;
4839 /* Lower early initialization of privatized variable NEW_VAR
4840 if it needs an allocator (has allocate clause). */
4842 static bool
4843 lower_private_allocate (tree var, tree new_var, tree &allocator,
4844 tree &allocate_ptr, gimple_seq *ilist,
4845 omp_context *ctx, bool is_ref, tree size)
4847 if (allocator)
4848 return false;
4849 gcc_assert (allocate_ptr == NULL_TREE);
4850 if (ctx->allocate_map
4851 && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
4852 if (tree *allocatorp = ctx->allocate_map->get (var))
4853 allocator = *allocatorp;
4854 if (allocator == NULL_TREE)
4855 return false;
4856 if (!is_ref && omp_privatize_by_reference (var))
4858 allocator = NULL_TREE;
4859 return false;
4862 unsigned HOST_WIDE_INT ialign = 0;
4863 if (TREE_CODE (allocator) == TREE_LIST)
4865 ialign = tree_to_uhwi (TREE_VALUE (allocator));
4866 allocator = TREE_PURPOSE (allocator);
4868 if (TREE_CODE (allocator) != INTEGER_CST)
4869 allocator = build_outer_var_ref (allocator, ctx, OMP_CLAUSE_ALLOCATE);
4870 allocator = fold_convert (pointer_sized_int_node, allocator);
4871 if (TREE_CODE (allocator) != INTEGER_CST)
4873 tree var = create_tmp_var (TREE_TYPE (allocator));
4874 gimplify_assign (var, allocator, ilist);
4875 allocator = var;
4878 tree ptr_type, align, sz = size;
4879 if (TYPE_P (new_var))
4881 ptr_type = build_pointer_type (new_var);
4882 ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
4884 else if (is_ref)
4886 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4887 ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4889 else
4891 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4892 ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
4893 if (sz == NULL_TREE)
4894 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
4896 align = build_int_cst (size_type_node, ialign);
4897 if (TREE_CODE (sz) != INTEGER_CST)
4899 tree szvar = create_tmp_var (size_type_node);
4900 gimplify_assign (szvar, sz, ilist);
4901 sz = szvar;
4903 allocate_ptr = create_tmp_var (ptr_type);
4904 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4905 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4906 gimple_call_set_lhs (g, allocate_ptr);
4907 gimple_seq_add_stmt (ilist, g);
4908 if (!is_ref)
4910 tree x = build_simple_mem_ref (allocate_ptr);
4911 TREE_THIS_NOTRAP (x) = 1;
4912 SET_DECL_VALUE_EXPR (new_var, x);
4913 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4915 return true;
4918 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4919 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4920 private variables. Initialization statements go in ILIST, while calls
4921 to destructors go in DLIST. */
4923 static void
4924 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4925 omp_context *ctx, struct omp_for_data *fd)
4927 tree c, copyin_seq, x, ptr;
4928 bool copyin_by_ref = false;
4929 bool lastprivate_firstprivate = false;
4930 bool reduction_omp_orig_ref = false;
4931 int pass;
4932 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4933 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4934 omplow_simd_context sctx = omplow_simd_context ();
4935 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4936 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4937 gimple_seq llist[4] = { };
4938 tree nonconst_simd_if = NULL_TREE;
4940 copyin_seq = NULL;
4941 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4943 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4944 with data sharing clauses referencing variable sized vars. That
4945 is unnecessarily hard to support and very unlikely to result in
4946 vectorized code anyway. */
4947 if (is_simd)
4948 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4949 switch (OMP_CLAUSE_CODE (c))
4951 case OMP_CLAUSE_LINEAR:
4952 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4953 sctx.max_vf = 1;
4954 /* FALLTHRU */
4955 case OMP_CLAUSE_PRIVATE:
4956 case OMP_CLAUSE_FIRSTPRIVATE:
4957 case OMP_CLAUSE_LASTPRIVATE:
4958 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4959 sctx.max_vf = 1;
4960 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4962 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4963 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4964 sctx.max_vf = 1;
4966 break;
4967 case OMP_CLAUSE_REDUCTION:
4968 case OMP_CLAUSE_IN_REDUCTION:
4969 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4970 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4971 sctx.max_vf = 1;
4972 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4974 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4975 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4976 sctx.max_vf = 1;
4978 break;
4979 case OMP_CLAUSE_IF:
4980 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4981 sctx.max_vf = 1;
4982 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4983 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4984 break;
4985 case OMP_CLAUSE_SIMDLEN:
4986 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4987 sctx.max_vf = 1;
4988 break;
4989 case OMP_CLAUSE__CONDTEMP_:
4990 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4991 if (sctx.is_simt)
4992 sctx.max_vf = 1;
4993 break;
4994 default:
4995 continue;
4998 /* Add a placeholder for simduid. */
4999 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
5000 sctx.simt_eargs.safe_push (NULL_TREE);
5002 unsigned task_reduction_cnt = 0;
5003 unsigned task_reduction_cntorig = 0;
5004 unsigned task_reduction_cnt_full = 0;
5005 unsigned task_reduction_cntorig_full = 0;
5006 unsigned task_reduction_other_cnt = 0;
5007 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
5008 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
5009 /* Do all the fixed sized types in the first pass, and the variable sized
5010 types in the second pass. This makes sure that the scalar arguments to
5011 the variable sized types are processed before we use them in the
5012 variable sized operations. For task reductions we use 4 passes, in the
5013 first two we ignore them, in the third one gather arguments for
5014 GOMP_task_reduction_remap call and in the last pass actually handle
5015 the task reductions. */
5016 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
5017 ? 4 : 2); ++pass)
5019 if (pass == 2 && task_reduction_cnt)
5021 tskred_atype
5022 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
5023 + task_reduction_cntorig);
5024 tskred_avar = create_tmp_var_raw (tskred_atype);
5025 gimple_add_tmp_var (tskred_avar);
5026 TREE_ADDRESSABLE (tskred_avar) = 1;
5027 task_reduction_cnt_full = task_reduction_cnt;
5028 task_reduction_cntorig_full = task_reduction_cntorig;
5030 else if (pass == 3 && task_reduction_cnt)
5032 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
5033 gimple *g
5034 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
5035 size_int (task_reduction_cntorig),
5036 build_fold_addr_expr (tskred_avar));
5037 gimple_seq_add_stmt (ilist, g);
5039 if (pass == 3 && task_reduction_other_cnt)
5041 /* For reduction clauses, build
5042 tskred_base = (void *) tskred_temp[2]
5043 + omp_get_thread_num () * tskred_temp[1]
5044 or if tskred_temp[1] is known to be constant, that constant
5045 directly. This is the start of the private reduction copy block
5046 for the current thread. */
5047 tree v = create_tmp_var (integer_type_node);
5048 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
5049 gimple *g = gimple_build_call (x, 0);
5050 gimple_call_set_lhs (g, v);
5051 gimple_seq_add_stmt (ilist, g);
5052 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
5053 tskred_temp = OMP_CLAUSE_DECL (c);
5054 if (is_taskreg_ctx (ctx))
5055 tskred_temp = lookup_decl (tskred_temp, ctx);
5056 tree v2 = create_tmp_var (sizetype);
5057 g = gimple_build_assign (v2, NOP_EXPR, v);
5058 gimple_seq_add_stmt (ilist, g);
5059 if (ctx->task_reductions[0])
5060 v = fold_convert (sizetype, ctx->task_reductions[0]);
5061 else
5062 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
5063 tree v3 = create_tmp_var (sizetype);
5064 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
5065 gimple_seq_add_stmt (ilist, g);
5066 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
5067 tskred_base = create_tmp_var (ptr_type_node);
5068 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
5069 gimple_seq_add_stmt (ilist, g);
5071 task_reduction_cnt = 0;
5072 task_reduction_cntorig = 0;
5073 task_reduction_other_cnt = 0;
5074 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5076 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
5077 tree var, new_var;
5078 bool by_ref;
5079 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5080 bool task_reduction_p = false;
5081 bool task_reduction_needs_orig_p = false;
5082 tree cond = NULL_TREE;
5083 tree allocator, allocate_ptr;
5085 switch (c_kind)
5087 case OMP_CLAUSE_PRIVATE:
5088 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
5089 continue;
5090 break;
5091 case OMP_CLAUSE_SHARED:
5092 /* Ignore shared directives in teams construct inside
5093 of target construct. */
5094 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5095 && !is_host_teams_ctx (ctx))
5096 continue;
5097 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
5099 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
5100 || is_global_var (OMP_CLAUSE_DECL (c)));
5101 continue;
5103 case OMP_CLAUSE_FIRSTPRIVATE:
5104 case OMP_CLAUSE_COPYIN:
5105 break;
5106 case OMP_CLAUSE_LINEAR:
5107 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
5108 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5109 lastprivate_firstprivate = true;
5110 break;
5111 case OMP_CLAUSE_REDUCTION:
5112 case OMP_CLAUSE_IN_REDUCTION:
5113 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5114 || is_task_ctx (ctx)
5115 || OMP_CLAUSE_REDUCTION_TASK (c))
5117 task_reduction_p = true;
5118 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5120 task_reduction_other_cnt++;
5121 if (pass == 2)
5122 continue;
5124 else
5125 task_reduction_cnt++;
5126 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5128 var = OMP_CLAUSE_DECL (c);
5129 /* If var is a global variable that isn't privatized
5130 in outer contexts, we don't need to look up the
5131 original address, it is always the address of the
5132 global variable itself. */
5133 if (!DECL_P (var)
5134 || omp_privatize_by_reference (var)
5135 || !is_global_var
5136 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5138 task_reduction_needs_orig_p = true;
5139 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5140 task_reduction_cntorig++;
5144 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5145 reduction_omp_orig_ref = true;
5146 break;
5147 case OMP_CLAUSE__REDUCTEMP_:
5148 if (!is_taskreg_ctx (ctx))
5149 continue;
5150 /* FALLTHRU */
5151 case OMP_CLAUSE__LOOPTEMP_:
5152 /* Handle _looptemp_/_reductemp_ clauses only on
5153 parallel/task. */
5154 if (fd)
5155 continue;
5156 break;
5157 case OMP_CLAUSE_LASTPRIVATE:
5158 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5160 lastprivate_firstprivate = true;
5161 if (pass != 0 || is_taskloop_ctx (ctx))
5162 continue;
5164 /* Even without corresponding firstprivate, if
5165 decl is Fortran allocatable, it needs outer var
5166 reference. */
5167 else if (pass == 0
5168 && lang_hooks.decls.omp_private_outer_ref
5169 (OMP_CLAUSE_DECL (c)))
5170 lastprivate_firstprivate = true;
5171 break;
5172 case OMP_CLAUSE_ALIGNED:
5173 if (pass != 1)
5174 continue;
5175 var = OMP_CLAUSE_DECL (c);
5176 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5177 && !is_global_var (var))
5179 new_var = maybe_lookup_decl (var, ctx);
5180 if (new_var == NULL_TREE)
5181 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
5182 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5183 tree alarg = omp_clause_aligned_alignment (c);
5184 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5185 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5186 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5187 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5188 gimplify_and_add (x, ilist);
5190 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5191 && is_global_var (var))
5193 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5194 new_var = lookup_decl (var, ctx);
5195 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
5196 t = build_fold_addr_expr_loc (clause_loc, t);
5197 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5198 tree alarg = omp_clause_aligned_alignment (c);
5199 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5200 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5201 t = fold_convert_loc (clause_loc, ptype, t);
5202 x = create_tmp_var (ptype);
5203 t = build2 (MODIFY_EXPR, ptype, x, t);
5204 gimplify_and_add (t, ilist);
5205 t = build_simple_mem_ref_loc (clause_loc, x);
5206 SET_DECL_VALUE_EXPR (new_var, t);
5207 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5209 continue;
5210 case OMP_CLAUSE__CONDTEMP_:
5211 if (is_parallel_ctx (ctx)
5212 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5213 break;
5214 continue;
5215 default:
5216 continue;
5219 if (task_reduction_p != (pass >= 2))
5220 continue;
5222 allocator = NULL_TREE;
5223 allocate_ptr = NULL_TREE;
5224 new_var = var = OMP_CLAUSE_DECL (c);
5225 if ((c_kind == OMP_CLAUSE_REDUCTION
5226 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5227 && TREE_CODE (var) == MEM_REF)
5229 var = TREE_OPERAND (var, 0);
5230 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5231 var = TREE_OPERAND (var, 0);
5232 if (TREE_CODE (var) == INDIRECT_REF
5233 || TREE_CODE (var) == ADDR_EXPR)
5234 var = TREE_OPERAND (var, 0);
5235 if (is_variable_sized (var))
5237 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5238 var = DECL_VALUE_EXPR (var);
5239 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5240 var = TREE_OPERAND (var, 0);
5241 gcc_assert (DECL_P (var));
5243 new_var = var;
5245 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5247 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5248 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5250 else if (c_kind != OMP_CLAUSE_COPYIN)
5251 new_var = lookup_decl (var, ctx);
5253 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5255 if (pass != 0)
5256 continue;
5258 /* C/C++ array section reductions. */
5259 else if ((c_kind == OMP_CLAUSE_REDUCTION
5260 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5261 && var != OMP_CLAUSE_DECL (c))
5263 if (pass == 0)
5264 continue;
5266 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5267 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5269 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5271 tree b = TREE_OPERAND (orig_var, 1);
5272 if (is_omp_target (ctx->stmt))
5273 b = NULL_TREE;
5274 else
5275 b = maybe_lookup_decl (b, ctx);
5276 if (b == NULL)
5278 b = TREE_OPERAND (orig_var, 1);
5279 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5281 if (integer_zerop (bias))
5282 bias = b;
5283 else
5285 bias = fold_convert_loc (clause_loc,
5286 TREE_TYPE (b), bias);
5287 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5288 TREE_TYPE (b), b, bias);
5290 orig_var = TREE_OPERAND (orig_var, 0);
5292 if (pass == 2)
5294 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5295 if (is_global_var (out)
5296 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5297 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5298 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5299 != POINTER_TYPE)))
5300 x = var;
5301 else if (is_omp_target (ctx->stmt))
5302 x = out;
5303 else
5305 bool by_ref = use_pointer_for_field (var, NULL);
5306 x = build_receiver_ref (var, by_ref, ctx);
5307 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5308 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5309 == POINTER_TYPE))
5310 x = build_fold_addr_expr (x);
5312 if (TREE_CODE (orig_var) == INDIRECT_REF)
5313 x = build_simple_mem_ref (x);
5314 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5316 if (var == TREE_OPERAND (orig_var, 0))
5317 x = build_fold_addr_expr (x);
5319 bias = fold_convert (sizetype, bias);
5320 x = fold_convert (ptr_type_node, x);
5321 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5322 TREE_TYPE (x), x, bias);
5323 unsigned cnt = task_reduction_cnt - 1;
5324 if (!task_reduction_needs_orig_p)
5325 cnt += (task_reduction_cntorig_full
5326 - task_reduction_cntorig);
5327 else
5328 cnt = task_reduction_cntorig - 1;
5329 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5330 size_int (cnt), NULL_TREE, NULL_TREE);
5331 gimplify_assign (r, x, ilist);
5332 continue;
5335 if (TREE_CODE (orig_var) == INDIRECT_REF
5336 || TREE_CODE (orig_var) == ADDR_EXPR)
5337 orig_var = TREE_OPERAND (orig_var, 0);
5338 tree d = OMP_CLAUSE_DECL (c);
5339 tree type = TREE_TYPE (d);
5340 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5341 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5342 tree sz = v;
5343 const char *name = get_name (orig_var);
5344 if (pass != 3 && !TREE_CONSTANT (v))
5346 tree t;
5347 if (is_omp_target (ctx->stmt))
5348 t = NULL_TREE;
5349 else
5350 t = maybe_lookup_decl (v, ctx);
5351 if (t)
5352 v = t;
5353 else
5354 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5355 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5356 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5357 TREE_TYPE (v), v,
5358 build_int_cst (TREE_TYPE (v), 1));
5359 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5360 TREE_TYPE (v), t,
5361 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5363 if (pass == 3)
5365 tree xv = create_tmp_var (ptr_type_node);
5366 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5368 unsigned cnt = task_reduction_cnt - 1;
5369 if (!task_reduction_needs_orig_p)
5370 cnt += (task_reduction_cntorig_full
5371 - task_reduction_cntorig);
5372 else
5373 cnt = task_reduction_cntorig - 1;
5374 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5375 size_int (cnt), NULL_TREE, NULL_TREE);
5377 gimple *g = gimple_build_assign (xv, x);
5378 gimple_seq_add_stmt (ilist, g);
5380 else
5382 unsigned int idx = *ctx->task_reduction_map->get (c);
5383 tree off;
5384 if (ctx->task_reductions[1 + idx])
5385 off = fold_convert (sizetype,
5386 ctx->task_reductions[1 + idx]);
5387 else
5388 off = task_reduction_read (ilist, tskred_temp, sizetype,
5389 7 + 3 * idx + 1);
5390 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5391 tskred_base, off);
5392 gimple_seq_add_stmt (ilist, g);
5394 x = fold_convert (build_pointer_type (boolean_type_node),
5395 xv);
5396 if (TREE_CONSTANT (v))
5397 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5398 TYPE_SIZE_UNIT (type));
5399 else
5401 tree t;
5402 if (is_omp_target (ctx->stmt))
5403 t = NULL_TREE;
5404 else
5405 t = maybe_lookup_decl (v, ctx);
5406 if (t)
5407 v = t;
5408 else
5409 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5410 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5411 fb_rvalue);
5412 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5413 TREE_TYPE (v), v,
5414 build_int_cst (TREE_TYPE (v), 1));
5415 t = fold_build2_loc (clause_loc, MULT_EXPR,
5416 TREE_TYPE (v), t,
5417 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5418 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5420 cond = create_tmp_var (TREE_TYPE (x));
5421 gimplify_assign (cond, x, ilist);
5422 x = xv;
5424 else if (lower_private_allocate (var, type, allocator,
5425 allocate_ptr, ilist, ctx,
5426 true,
5427 TREE_CONSTANT (v)
5428 ? TYPE_SIZE_UNIT (type)
5429 : sz))
5430 x = allocate_ptr;
5431 else if (TREE_CONSTANT (v))
5433 x = create_tmp_var_raw (type, name);
5434 gimple_add_tmp_var (x);
5435 TREE_ADDRESSABLE (x) = 1;
5436 x = build_fold_addr_expr_loc (clause_loc, x);
5438 else
5440 tree atmp
5441 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5442 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5443 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5446 tree ptype = build_pointer_type (TREE_TYPE (type));
5447 x = fold_convert_loc (clause_loc, ptype, x);
5448 tree y = create_tmp_var (ptype, name);
5449 gimplify_assign (y, x, ilist);
5450 x = y;
5451 tree yb = y;
5453 if (!integer_zerop (bias))
5455 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5456 bias);
5457 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5459 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5460 pointer_sized_int_node, yb, bias);
5461 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5462 yb = create_tmp_var (ptype, name);
5463 gimplify_assign (yb, x, ilist);
5464 x = yb;
5467 d = TREE_OPERAND (d, 0);
5468 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5469 d = TREE_OPERAND (d, 0);
5470 if (TREE_CODE (d) == ADDR_EXPR)
5472 if (orig_var != var)
5474 gcc_assert (is_variable_sized (orig_var));
5475 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5477 gimplify_assign (new_var, x, ilist);
5478 tree new_orig_var = lookup_decl (orig_var, ctx);
5479 tree t = build_fold_indirect_ref (new_var);
5480 DECL_IGNORED_P (new_var) = 0;
5481 TREE_THIS_NOTRAP (t) = 1;
5482 SET_DECL_VALUE_EXPR (new_orig_var, t);
5483 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5485 else
5487 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5488 build_int_cst (ptype, 0));
5489 SET_DECL_VALUE_EXPR (new_var, x);
5490 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5493 else
5495 gcc_assert (orig_var == var);
5496 if (TREE_CODE (d) == INDIRECT_REF)
5498 x = create_tmp_var (ptype, name);
5499 TREE_ADDRESSABLE (x) = 1;
5500 gimplify_assign (x, yb, ilist);
5501 x = build_fold_addr_expr_loc (clause_loc, x);
5503 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5504 gimplify_assign (new_var, x, ilist);
5506 /* GOMP_taskgroup_reduction_register memsets the whole
5507 array to zero. If the initializer is zero, we don't
5508 need to initialize it again, just mark it as ever
5509 used unconditionally, i.e. cond = true. */
5510 if (cond
5511 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5512 && initializer_zerop (omp_reduction_init (c,
5513 TREE_TYPE (type))))
5515 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5516 boolean_true_node);
5517 gimple_seq_add_stmt (ilist, g);
5518 continue;
5520 tree end = create_artificial_label (UNKNOWN_LOCATION);
5521 if (cond)
5523 gimple *g;
5524 if (!is_parallel_ctx (ctx))
5526 tree condv = create_tmp_var (boolean_type_node);
5527 g = gimple_build_assign (condv,
5528 build_simple_mem_ref (cond));
5529 gimple_seq_add_stmt (ilist, g);
5530 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5531 g = gimple_build_cond (NE_EXPR, condv,
5532 boolean_false_node, end, lab1);
5533 gimple_seq_add_stmt (ilist, g);
5534 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5536 g = gimple_build_assign (build_simple_mem_ref (cond),
5537 boolean_true_node);
5538 gimple_seq_add_stmt (ilist, g);
5541 tree y1 = create_tmp_var (ptype);
5542 gimplify_assign (y1, y, ilist);
5543 tree i2 = NULL_TREE, y2 = NULL_TREE;
5544 tree body2 = NULL_TREE, end2 = NULL_TREE;
5545 tree y3 = NULL_TREE, y4 = NULL_TREE;
5546 if (task_reduction_needs_orig_p)
5548 y3 = create_tmp_var (ptype);
5549 tree ref;
5550 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5551 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5552 size_int (task_reduction_cnt_full
5553 + task_reduction_cntorig - 1),
5554 NULL_TREE, NULL_TREE);
5555 else
5557 unsigned int idx = *ctx->task_reduction_map->get (c);
5558 ref = task_reduction_read (ilist, tskred_temp, ptype,
5559 7 + 3 * idx);
5561 gimplify_assign (y3, ref, ilist);
5563 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5565 if (pass != 3)
5567 y2 = create_tmp_var (ptype);
5568 gimplify_assign (y2, y, ilist);
5570 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5572 tree ref = build_outer_var_ref (var, ctx);
5573 /* For ref build_outer_var_ref already performs this. */
5574 if (TREE_CODE (d) == INDIRECT_REF)
5575 gcc_assert (omp_privatize_by_reference (var));
5576 else if (TREE_CODE (d) == ADDR_EXPR)
5577 ref = build_fold_addr_expr (ref);
5578 else if (omp_privatize_by_reference (var))
5579 ref = build_fold_addr_expr (ref);
5580 ref = fold_convert_loc (clause_loc, ptype, ref);
5581 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5582 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5584 y3 = create_tmp_var (ptype);
5585 gimplify_assign (y3, unshare_expr (ref), ilist);
5587 if (is_simd)
5589 y4 = create_tmp_var (ptype);
5590 gimplify_assign (y4, ref, dlist);
5594 tree i = create_tmp_var (TREE_TYPE (v));
5595 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5596 tree body = create_artificial_label (UNKNOWN_LOCATION);
5597 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5598 if (y2)
5600 i2 = create_tmp_var (TREE_TYPE (v));
5601 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5602 body2 = create_artificial_label (UNKNOWN_LOCATION);
5603 end2 = create_artificial_label (UNKNOWN_LOCATION);
5604 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5606 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5608 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5609 tree decl_placeholder
5610 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5611 SET_DECL_VALUE_EXPR (decl_placeholder,
5612 build_simple_mem_ref (y1));
5613 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5614 SET_DECL_VALUE_EXPR (placeholder,
5615 y3 ? build_simple_mem_ref (y3)
5616 : error_mark_node);
5617 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5618 x = lang_hooks.decls.omp_clause_default_ctor
5619 (c, build_simple_mem_ref (y1),
5620 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5621 if (x)
5622 gimplify_and_add (x, ilist);
5623 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5625 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5626 lower_omp (&tseq, ctx);
5627 gimple_seq_add_seq (ilist, tseq);
5629 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5630 if (is_simd)
5632 SET_DECL_VALUE_EXPR (decl_placeholder,
5633 build_simple_mem_ref (y2));
5634 SET_DECL_VALUE_EXPR (placeholder,
5635 build_simple_mem_ref (y4));
5636 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5637 lower_omp (&tseq, ctx);
5638 gimple_seq_add_seq (dlist, tseq);
5639 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5641 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5642 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5643 if (y2)
5645 x = lang_hooks.decls.omp_clause_dtor
5646 (c, build_simple_mem_ref (y2));
5647 if (x)
5648 gimplify_and_add (x, dlist);
5651 else
5653 x = omp_reduction_init (c, TREE_TYPE (type));
5654 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5656 /* reduction(-:var) sums up the partial results, so it
5657 acts identically to reduction(+:var). */
5658 if (code == MINUS_EXPR)
5659 code = PLUS_EXPR;
5661 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5662 if (is_simd)
5664 x = build2 (code, TREE_TYPE (type),
5665 build_simple_mem_ref (y4),
5666 build_simple_mem_ref (y2));
5667 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5670 gimple *g
5671 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5672 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5673 gimple_seq_add_stmt (ilist, g);
5674 if (y3)
5676 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5677 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5678 gimple_seq_add_stmt (ilist, g);
5680 g = gimple_build_assign (i, PLUS_EXPR, i,
5681 build_int_cst (TREE_TYPE (i), 1));
5682 gimple_seq_add_stmt (ilist, g);
5683 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5684 gimple_seq_add_stmt (ilist, g);
5685 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5686 if (y2)
5688 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5689 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5690 gimple_seq_add_stmt (dlist, g);
5691 if (y4)
5693 g = gimple_build_assign
5694 (y4, POINTER_PLUS_EXPR, y4,
5695 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5696 gimple_seq_add_stmt (dlist, g);
5698 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5699 build_int_cst (TREE_TYPE (i2), 1));
5700 gimple_seq_add_stmt (dlist, g);
5701 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5702 gimple_seq_add_stmt (dlist, g);
5703 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5705 if (allocator)
5707 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5708 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5709 gimple_seq_add_stmt (dlist, g);
5711 continue;
5713 else if (pass == 2)
5715 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5716 if (is_global_var (out))
5717 x = var;
5718 else if (is_omp_target (ctx->stmt))
5719 x = out;
5720 else
5722 bool by_ref = use_pointer_for_field (var, ctx);
5723 x = build_receiver_ref (var, by_ref, ctx);
5725 if (!omp_privatize_by_reference (var))
5726 x = build_fold_addr_expr (x);
5727 x = fold_convert (ptr_type_node, x);
5728 unsigned cnt = task_reduction_cnt - 1;
5729 if (!task_reduction_needs_orig_p)
5730 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5731 else
5732 cnt = task_reduction_cntorig - 1;
5733 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5734 size_int (cnt), NULL_TREE, NULL_TREE);
5735 gimplify_assign (r, x, ilist);
5736 continue;
5738 else if (pass == 3)
5740 tree type = TREE_TYPE (new_var);
5741 if (!omp_privatize_by_reference (var))
5742 type = build_pointer_type (type);
5743 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5745 unsigned cnt = task_reduction_cnt - 1;
5746 if (!task_reduction_needs_orig_p)
5747 cnt += (task_reduction_cntorig_full
5748 - task_reduction_cntorig);
5749 else
5750 cnt = task_reduction_cntorig - 1;
5751 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5752 size_int (cnt), NULL_TREE, NULL_TREE);
5754 else
5756 unsigned int idx = *ctx->task_reduction_map->get (c);
5757 tree off;
5758 if (ctx->task_reductions[1 + idx])
5759 off = fold_convert (sizetype,
5760 ctx->task_reductions[1 + idx]);
5761 else
5762 off = task_reduction_read (ilist, tskred_temp, sizetype,
5763 7 + 3 * idx + 1);
5764 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5765 tskred_base, off);
5767 x = fold_convert (type, x);
5768 tree t;
5769 if (omp_privatize_by_reference (var))
5771 gimplify_assign (new_var, x, ilist);
5772 t = new_var;
5773 new_var = build_simple_mem_ref (new_var);
5775 else
5777 t = create_tmp_var (type);
5778 gimplify_assign (t, x, ilist);
5779 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5780 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5782 t = fold_convert (build_pointer_type (boolean_type_node), t);
5783 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5784 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5785 cond = create_tmp_var (TREE_TYPE (t));
5786 gimplify_assign (cond, t, ilist);
5788 else if (is_variable_sized (var))
5790 /* For variable sized types, we need to allocate the
5791 actual storage here. Call alloca and store the
5792 result in the pointer decl that we created elsewhere. */
5793 if (pass == 0)
5794 continue;
5796 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5798 tree tmp;
5800 ptr = DECL_VALUE_EXPR (new_var);
5801 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5802 ptr = TREE_OPERAND (ptr, 0);
5803 gcc_assert (DECL_P (ptr));
5804 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5806 if (lower_private_allocate (var, new_var, allocator,
5807 allocate_ptr, ilist, ctx,
5808 false, x))
5809 tmp = allocate_ptr;
5810 else
5812 /* void *tmp = __builtin_alloca */
5813 tree atmp
5814 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5815 gcall *stmt
5816 = gimple_build_call (atmp, 2, x,
5817 size_int (DECL_ALIGN (var)));
5818 cfun->calls_alloca = 1;
5819 tmp = create_tmp_var_raw (ptr_type_node);
5820 gimple_add_tmp_var (tmp);
5821 gimple_call_set_lhs (stmt, tmp);
5823 gimple_seq_add_stmt (ilist, stmt);
5826 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5827 gimplify_assign (ptr, x, ilist);
5830 else if (omp_privatize_by_reference (var)
5831 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5832 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5834 /* For references that are being privatized for Fortran,
5835 allocate new backing storage for the new pointer
5836 variable. This allows us to avoid changing all the
5837 code that expects a pointer to something that expects
5838 a direct variable. */
5839 if (pass == 0)
5840 continue;
5842 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5843 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5845 x = build_receiver_ref (var, false, ctx);
5846 if (ctx->allocate_map)
5847 if (tree *allocatep = ctx->allocate_map->get (var))
5849 allocator = *allocatep;
5850 if (TREE_CODE (allocator) == TREE_LIST)
5851 allocator = TREE_PURPOSE (allocator);
5852 if (TREE_CODE (allocator) != INTEGER_CST)
5853 allocator = build_outer_var_ref (allocator, ctx);
5854 allocator = fold_convert (pointer_sized_int_node,
5855 allocator);
5856 allocate_ptr = unshare_expr (x);
5858 if (allocator == NULL_TREE)
5859 x = build_fold_addr_expr_loc (clause_loc, x);
5861 else if (lower_private_allocate (var, new_var, allocator,
5862 allocate_ptr,
5863 ilist, ctx, true, x))
5864 x = allocate_ptr;
5865 else if (TREE_CONSTANT (x))
5867 /* For reduction in SIMD loop, defer adding the
5868 initialization of the reference, because if we decide
5869 to use SIMD array for it, the initilization could cause
5870 expansion ICE. Ditto for other privatization clauses. */
5871 if (is_simd)
5872 x = NULL_TREE;
5873 else
5875 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5876 get_name (var));
5877 gimple_add_tmp_var (x);
5878 TREE_ADDRESSABLE (x) = 1;
5879 x = build_fold_addr_expr_loc (clause_loc, x);
5882 else
5884 tree atmp
5885 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5886 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5887 tree al = size_int (TYPE_ALIGN (rtype));
5888 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5891 if (x)
5893 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5894 gimplify_assign (new_var, x, ilist);
5897 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5899 else if ((c_kind == OMP_CLAUSE_REDUCTION
5900 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5901 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5903 if (pass == 0)
5904 continue;
5906 else if (pass != 0)
5907 continue;
5909 switch (OMP_CLAUSE_CODE (c))
5911 case OMP_CLAUSE_SHARED:
5912 /* Ignore shared directives in teams construct inside
5913 target construct. */
5914 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5915 && !is_host_teams_ctx (ctx))
5916 continue;
5917 /* Shared global vars are just accessed directly. */
5918 if (is_global_var (new_var))
5919 break;
5920 /* For taskloop firstprivate/lastprivate, represented
5921 as firstprivate and shared clause on the task, new_var
5922 is the firstprivate var. */
5923 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5924 break;
5925 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5926 needs to be delayed until after fixup_child_record_type so
5927 that we get the correct type during the dereference. */
5928 by_ref = use_pointer_for_field (var, ctx);
5929 x = build_receiver_ref (var, by_ref, ctx);
5930 SET_DECL_VALUE_EXPR (new_var, x);
5931 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5933 /* ??? If VAR is not passed by reference, and the variable
5934 hasn't been initialized yet, then we'll get a warning for
5935 the store into the omp_data_s structure. Ideally, we'd be
5936 able to notice this and not store anything at all, but
5937 we're generating code too early. Suppress the warning. */
5938 if (!by_ref)
5939 suppress_warning (var, OPT_Wuninitialized);
5940 break;
5942 case OMP_CLAUSE__CONDTEMP_:
5943 if (is_parallel_ctx (ctx))
5945 x = build_receiver_ref (var, false, ctx);
5946 SET_DECL_VALUE_EXPR (new_var, x);
5947 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5949 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5951 x = build_zero_cst (TREE_TYPE (var));
5952 goto do_private;
5954 break;
5956 case OMP_CLAUSE_LASTPRIVATE:
5957 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5958 break;
5959 /* FALLTHRU */
5961 case OMP_CLAUSE_PRIVATE:
5962 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5963 x = build_outer_var_ref (var, ctx);
5964 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5966 if (is_task_ctx (ctx))
5967 x = build_receiver_ref (var, false, ctx);
5968 else
5969 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5971 else
5972 x = NULL;
5973 do_private:
5974 tree nx;
5975 bool copy_ctor;
5976 copy_ctor = false;
5977 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5978 ilist, ctx, false, NULL_TREE);
5979 nx = unshare_expr (new_var);
5980 if (is_simd
5981 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5982 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5983 copy_ctor = true;
5984 if (copy_ctor)
5985 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5986 else
5987 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5988 if (is_simd)
5990 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5991 if ((TREE_ADDRESSABLE (new_var) || nx || y
5992 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5993 && (gimple_omp_for_collapse (ctx->stmt) != 1
5994 || (gimple_omp_for_index (ctx->stmt, 0)
5995 != new_var)))
5996 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5997 || omp_privatize_by_reference (var))
5998 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5999 ivar, lvar))
6001 if (omp_privatize_by_reference (var))
6003 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6004 tree new_vard = TREE_OPERAND (new_var, 0);
6005 gcc_assert (DECL_P (new_vard));
6006 SET_DECL_VALUE_EXPR (new_vard,
6007 build_fold_addr_expr (lvar));
6008 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6011 if (nx)
6013 tree iv = unshare_expr (ivar);
6014 if (copy_ctor)
6015 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
6017 else
6018 x = lang_hooks.decls.omp_clause_default_ctor (c,
6022 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
6024 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
6025 unshare_expr (ivar), x);
6026 nx = x;
6028 if (nx && x)
6029 gimplify_and_add (x, &llist[0]);
6030 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6031 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6033 tree v = new_var;
6034 if (!DECL_P (v))
6036 gcc_assert (TREE_CODE (v) == MEM_REF);
6037 v = TREE_OPERAND (v, 0);
6038 gcc_assert (DECL_P (v));
6040 v = *ctx->lastprivate_conditional_map->get (v);
6041 tree t = create_tmp_var (TREE_TYPE (v));
6042 tree z = build_zero_cst (TREE_TYPE (v));
6043 tree orig_v
6044 = build_outer_var_ref (var, ctx,
6045 OMP_CLAUSE_LASTPRIVATE);
6046 gimple_seq_add_stmt (dlist,
6047 gimple_build_assign (t, z));
6048 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
6049 tree civar = DECL_VALUE_EXPR (v);
6050 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
6051 civar = unshare_expr (civar);
6052 TREE_OPERAND (civar, 1) = sctx.idx;
6053 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
6054 unshare_expr (civar));
6055 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
6056 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
6057 orig_v, unshare_expr (ivar)));
6058 tree cond = build2 (LT_EXPR, boolean_type_node, t,
6059 civar);
6060 x = build3 (COND_EXPR, void_type_node, cond, x,
6061 void_node);
6062 gimple_seq tseq = NULL;
6063 gimplify_and_add (x, &tseq);
6064 if (ctx->outer)
6065 lower_omp (&tseq, ctx->outer);
6066 gimple_seq_add_seq (&llist[1], tseq);
6068 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6069 && ctx->for_simd_scan_phase)
6071 x = unshare_expr (ivar);
6072 tree orig_v
6073 = build_outer_var_ref (var, ctx,
6074 OMP_CLAUSE_LASTPRIVATE);
6075 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6076 orig_v);
6077 gimplify_and_add (x, &llist[0]);
6079 if (y)
6081 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
6082 if (y)
6083 gimplify_and_add (y, &llist[1]);
6085 break;
6087 if (omp_privatize_by_reference (var))
6089 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6090 tree new_vard = TREE_OPERAND (new_var, 0);
6091 gcc_assert (DECL_P (new_vard));
6092 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6093 x = TYPE_SIZE_UNIT (type);
6094 if (TREE_CONSTANT (x))
6096 x = create_tmp_var_raw (type, get_name (var));
6097 gimple_add_tmp_var (x);
6098 TREE_ADDRESSABLE (x) = 1;
6099 x = build_fold_addr_expr_loc (clause_loc, x);
6100 x = fold_convert_loc (clause_loc,
6101 TREE_TYPE (new_vard), x);
6102 gimplify_assign (new_vard, x, ilist);
6106 if (nx)
6107 gimplify_and_add (nx, ilist);
6108 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6109 && is_simd
6110 && ctx->for_simd_scan_phase)
6112 tree orig_v = build_outer_var_ref (var, ctx,
6113 OMP_CLAUSE_LASTPRIVATE);
6114 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
6115 orig_v);
6116 gimplify_and_add (x, ilist);
6118 /* FALLTHRU */
6120 do_dtor:
6121 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
6122 if (x)
6123 gimplify_and_add (x, dlist);
6124 if (allocator)
6126 if (!is_gimple_val (allocator))
6128 tree avar = create_tmp_var (TREE_TYPE (allocator));
6129 gimplify_assign (avar, allocator, dlist);
6130 allocator = avar;
6132 if (!is_gimple_val (allocate_ptr))
6134 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
6135 gimplify_assign (apvar, allocate_ptr, dlist);
6136 allocate_ptr = apvar;
6138 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
6139 gimple *g
6140 = gimple_build_call (f, 2, allocate_ptr, allocator);
6141 gimple_seq_add_stmt (dlist, g);
6143 break;
6145 case OMP_CLAUSE_LINEAR:
6146 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6147 goto do_firstprivate;
6148 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6149 x = NULL;
6150 else
6151 x = build_outer_var_ref (var, ctx);
6152 goto do_private;
6154 case OMP_CLAUSE_FIRSTPRIVATE:
6155 if (is_task_ctx (ctx))
6157 if ((omp_privatize_by_reference (var)
6158 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
6159 || is_variable_sized (var))
6160 goto do_dtor;
6161 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
6162 ctx))
6163 || use_pointer_for_field (var, NULL))
6165 x = build_receiver_ref (var, false, ctx);
6166 if (ctx->allocate_map)
6167 if (tree *allocatep = ctx->allocate_map->get (var))
6169 allocator = *allocatep;
6170 if (TREE_CODE (allocator) == TREE_LIST)
6171 allocator = TREE_PURPOSE (allocator);
6172 if (TREE_CODE (allocator) != INTEGER_CST)
6173 allocator = build_outer_var_ref (allocator, ctx);
6174 allocator = fold_convert (pointer_sized_int_node,
6175 allocator);
6176 allocate_ptr = unshare_expr (x);
6177 x = build_simple_mem_ref (x);
6178 TREE_THIS_NOTRAP (x) = 1;
6180 SET_DECL_VALUE_EXPR (new_var, x);
6181 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6182 goto do_dtor;
6185 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6186 && omp_privatize_by_reference (var))
6188 x = build_outer_var_ref (var, ctx);
6189 gcc_assert (TREE_CODE (x) == MEM_REF
6190 && integer_zerop (TREE_OPERAND (x, 1)));
6191 x = TREE_OPERAND (x, 0);
6192 x = lang_hooks.decls.omp_clause_copy_ctor
6193 (c, unshare_expr (new_var), x);
6194 gimplify_and_add (x, ilist);
6195 goto do_dtor;
6197 do_firstprivate:
6198 lower_private_allocate (var, new_var, allocator, allocate_ptr,
6199 ilist, ctx, false, NULL_TREE);
6200 x = build_outer_var_ref (var, ctx);
6201 if (is_simd)
6203 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6204 && gimple_omp_for_combined_into_p (ctx->stmt))
6206 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6207 if (DECL_P (t))
6208 t = build_outer_var_ref (t, ctx);
6209 tree stept = TREE_TYPE (t);
6210 tree ct = omp_find_clause (clauses,
6211 OMP_CLAUSE__LOOPTEMP_);
6212 gcc_assert (ct);
6213 tree l = OMP_CLAUSE_DECL (ct);
6214 tree n1 = fd->loop.n1;
6215 tree step = fd->loop.step;
6216 tree itype = TREE_TYPE (l);
6217 if (POINTER_TYPE_P (itype))
6218 itype = signed_type_for (itype);
6219 l = fold_build2 (MINUS_EXPR, itype, l, n1);
6220 if (TYPE_UNSIGNED (itype)
6221 && fd->loop.cond_code == GT_EXPR)
6222 l = fold_build2 (TRUNC_DIV_EXPR, itype,
6223 fold_build1 (NEGATE_EXPR, itype, l),
6224 fold_build1 (NEGATE_EXPR,
6225 itype, step));
6226 else
6227 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6228 t = fold_build2 (MULT_EXPR, stept,
6229 fold_convert (stept, l), t);
6231 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6233 if (omp_privatize_by_reference (var))
6235 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6236 tree new_vard = TREE_OPERAND (new_var, 0);
6237 gcc_assert (DECL_P (new_vard));
6238 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6239 nx = TYPE_SIZE_UNIT (type);
6240 if (TREE_CONSTANT (nx))
6242 nx = create_tmp_var_raw (type,
6243 get_name (var));
6244 gimple_add_tmp_var (nx);
6245 TREE_ADDRESSABLE (nx) = 1;
6246 nx = build_fold_addr_expr_loc (clause_loc,
6247 nx);
6248 nx = fold_convert_loc (clause_loc,
6249 TREE_TYPE (new_vard),
6250 nx);
6251 gimplify_assign (new_vard, nx, ilist);
6255 x = lang_hooks.decls.omp_clause_linear_ctor
6256 (c, new_var, x, t);
6257 gimplify_and_add (x, ilist);
6258 goto do_dtor;
6261 if (POINTER_TYPE_P (TREE_TYPE (x)))
6262 x = fold_build_pointer_plus (x, t);
6263 else
6264 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x,
6265 fold_convert (TREE_TYPE (x), t));
6268 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6269 || TREE_ADDRESSABLE (new_var)
6270 || omp_privatize_by_reference (var))
6271 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6272 ivar, lvar))
6274 if (omp_privatize_by_reference (var))
6276 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6277 tree new_vard = TREE_OPERAND (new_var, 0);
6278 gcc_assert (DECL_P (new_vard));
6279 SET_DECL_VALUE_EXPR (new_vard,
6280 build_fold_addr_expr (lvar));
6281 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6283 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6285 tree iv = create_tmp_var (TREE_TYPE (new_var));
6286 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6287 gimplify_and_add (x, ilist);
6288 gimple_stmt_iterator gsi
6289 = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6290 gassign *g
6291 = gimple_build_assign (unshare_expr (lvar), iv);
6292 gsi_insert_before_without_update (&gsi, g,
6293 GSI_SAME_STMT);
6294 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6295 enum tree_code code = PLUS_EXPR;
6296 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6297 code = POINTER_PLUS_EXPR;
6298 g = gimple_build_assign (iv, code, iv, t);
6299 gsi_insert_before_without_update (&gsi, g,
6300 GSI_SAME_STMT);
6301 break;
6303 x = lang_hooks.decls.omp_clause_copy_ctor
6304 (c, unshare_expr (ivar), x);
6305 gimplify_and_add (x, &llist[0]);
6306 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6307 if (x)
6308 gimplify_and_add (x, &llist[1]);
6309 break;
6311 if (omp_privatize_by_reference (var))
6313 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6314 tree new_vard = TREE_OPERAND (new_var, 0);
6315 gcc_assert (DECL_P (new_vard));
6316 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6317 nx = TYPE_SIZE_UNIT (type);
6318 if (TREE_CONSTANT (nx))
6320 nx = create_tmp_var_raw (type, get_name (var));
6321 gimple_add_tmp_var (nx);
6322 TREE_ADDRESSABLE (nx) = 1;
6323 nx = build_fold_addr_expr_loc (clause_loc, nx);
6324 nx = fold_convert_loc (clause_loc,
6325 TREE_TYPE (new_vard), nx);
6326 gimplify_assign (new_vard, nx, ilist);
6330 x = lang_hooks.decls.omp_clause_copy_ctor
6331 (c, unshare_expr (new_var), x);
6332 gimplify_and_add (x, ilist);
6333 goto do_dtor;
6335 case OMP_CLAUSE__LOOPTEMP_:
6336 case OMP_CLAUSE__REDUCTEMP_:
6337 gcc_assert (is_taskreg_ctx (ctx));
6338 x = build_outer_var_ref (var, ctx);
6339 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6340 gimplify_and_add (x, ilist);
6341 break;
6343 case OMP_CLAUSE_COPYIN:
6344 by_ref = use_pointer_for_field (var, NULL);
6345 x = build_receiver_ref (var, by_ref, ctx);
6346 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6347 append_to_statement_list (x, &copyin_seq);
6348 copyin_by_ref |= by_ref;
6349 break;
6351 case OMP_CLAUSE_REDUCTION:
6352 case OMP_CLAUSE_IN_REDUCTION:
6353 /* OpenACC reductions are initialized using the
6354 GOACC_REDUCTION internal function. */
6355 if (is_gimple_omp_oacc (ctx->stmt))
6356 break;
6357 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6359 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6360 gimple *tseq;
6361 tree ptype = TREE_TYPE (placeholder);
6362 if (cond)
6364 x = error_mark_node;
6365 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6366 && !task_reduction_needs_orig_p)
6367 x = var;
6368 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6370 tree pptype = build_pointer_type (ptype);
6371 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6372 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6373 size_int (task_reduction_cnt_full
6374 + task_reduction_cntorig - 1),
6375 NULL_TREE, NULL_TREE);
6376 else
6378 unsigned int idx
6379 = *ctx->task_reduction_map->get (c);
6380 x = task_reduction_read (ilist, tskred_temp,
6381 pptype, 7 + 3 * idx);
6383 x = fold_convert (pptype, x);
6384 x = build_simple_mem_ref (x);
6387 else
6389 lower_private_allocate (var, new_var, allocator,
6390 allocate_ptr, ilist, ctx, false,
6391 NULL_TREE);
6392 x = build_outer_var_ref (var, ctx);
6394 if (omp_privatize_by_reference (var)
6395 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6396 x = build_fold_addr_expr_loc (clause_loc, x);
6398 SET_DECL_VALUE_EXPR (placeholder, x);
6399 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6400 tree new_vard = new_var;
6401 if (omp_privatize_by_reference (var))
6403 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6404 new_vard = TREE_OPERAND (new_var, 0);
6405 gcc_assert (DECL_P (new_vard));
6407 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6408 if (is_simd
6409 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6410 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6411 rvarp = &rvar;
6412 if (is_simd
6413 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6414 ivar, lvar, rvarp,
6415 &rvar2))
6417 if (new_vard == new_var)
6419 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6420 SET_DECL_VALUE_EXPR (new_var, ivar);
6422 else
6424 SET_DECL_VALUE_EXPR (new_vard,
6425 build_fold_addr_expr (ivar));
6426 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6428 x = lang_hooks.decls.omp_clause_default_ctor
6429 (c, unshare_expr (ivar),
6430 build_outer_var_ref (var, ctx));
6431 if (rvarp && ctx->for_simd_scan_phase)
6433 if (x)
6434 gimplify_and_add (x, &llist[0]);
6435 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6436 if (x)
6437 gimplify_and_add (x, &llist[1]);
6438 break;
6440 else if (rvarp)
6442 if (x)
6444 gimplify_and_add (x, &llist[0]);
6446 tree ivar2 = unshare_expr (lvar);
6447 TREE_OPERAND (ivar2, 1) = sctx.idx;
6448 x = lang_hooks.decls.omp_clause_default_ctor
6449 (c, ivar2, build_outer_var_ref (var, ctx));
6450 gimplify_and_add (x, &llist[0]);
6452 if (rvar2)
6454 x = lang_hooks.decls.omp_clause_default_ctor
6455 (c, unshare_expr (rvar2),
6456 build_outer_var_ref (var, ctx));
6457 gimplify_and_add (x, &llist[0]);
6460 /* For types that need construction, add another
6461 private var which will be default constructed
6462 and optionally initialized with
6463 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6464 loop we want to assign this value instead of
6465 constructing and destructing it in each
6466 iteration. */
6467 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6468 gimple_add_tmp_var (nv);
6469 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6470 ? rvar2
6471 : ivar, 0),
6472 nv);
6473 x = lang_hooks.decls.omp_clause_default_ctor
6474 (c, nv, build_outer_var_ref (var, ctx));
6475 gimplify_and_add (x, ilist);
6477 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6479 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6480 x = DECL_VALUE_EXPR (new_vard);
6481 tree vexpr = nv;
6482 if (new_vard != new_var)
6483 vexpr = build_fold_addr_expr (nv);
6484 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6485 lower_omp (&tseq, ctx);
6486 SET_DECL_VALUE_EXPR (new_vard, x);
6487 gimple_seq_add_seq (ilist, tseq);
6488 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6491 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6492 if (x)
6493 gimplify_and_add (x, dlist);
6496 tree ref = build_outer_var_ref (var, ctx);
6497 x = unshare_expr (ivar);
6498 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6499 ref);
6500 gimplify_and_add (x, &llist[0]);
6502 ref = build_outer_var_ref (var, ctx);
6503 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6504 rvar);
6505 gimplify_and_add (x, &llist[3]);
6507 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6508 if (new_vard == new_var)
6509 SET_DECL_VALUE_EXPR (new_var, lvar);
6510 else
6511 SET_DECL_VALUE_EXPR (new_vard,
6512 build_fold_addr_expr (lvar));
6514 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6515 if (x)
6516 gimplify_and_add (x, &llist[1]);
6518 tree ivar2 = unshare_expr (lvar);
6519 TREE_OPERAND (ivar2, 1) = sctx.idx;
6520 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6521 if (x)
6522 gimplify_and_add (x, &llist[1]);
6524 if (rvar2)
6526 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6527 if (x)
6528 gimplify_and_add (x, &llist[1]);
6530 break;
6532 if (x)
6533 gimplify_and_add (x, &llist[0]);
6534 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6536 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6537 lower_omp (&tseq, ctx);
6538 gimple_seq_add_seq (&llist[0], tseq);
6540 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6541 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6542 lower_omp (&tseq, ctx);
6543 gimple_seq_add_seq (&llist[1], tseq);
6544 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6545 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6546 if (new_vard == new_var)
6547 SET_DECL_VALUE_EXPR (new_var, lvar);
6548 else
6549 SET_DECL_VALUE_EXPR (new_vard,
6550 build_fold_addr_expr (lvar));
6551 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6552 if (x)
6553 gimplify_and_add (x, &llist[1]);
6554 break;
6556 /* If this is a reference to constant size reduction var
6557 with placeholder, we haven't emitted the initializer
6558 for it because it is undesirable if SIMD arrays are used.
6559 But if they aren't used, we need to emit the deferred
6560 initialization now. */
6561 else if (omp_privatize_by_reference (var) && is_simd)
6562 handle_simd_reference (clause_loc, new_vard, ilist);
6564 tree lab2 = NULL_TREE;
6565 if (cond)
6567 gimple *g;
6568 if (!is_parallel_ctx (ctx))
6570 tree condv = create_tmp_var (boolean_type_node);
6571 tree m = build_simple_mem_ref (cond);
6572 g = gimple_build_assign (condv, m);
6573 gimple_seq_add_stmt (ilist, g);
6574 tree lab1
6575 = create_artificial_label (UNKNOWN_LOCATION);
6576 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6577 g = gimple_build_cond (NE_EXPR, condv,
6578 boolean_false_node,
6579 lab2, lab1);
6580 gimple_seq_add_stmt (ilist, g);
6581 gimple_seq_add_stmt (ilist,
6582 gimple_build_label (lab1));
6584 g = gimple_build_assign (build_simple_mem_ref (cond),
6585 boolean_true_node);
6586 gimple_seq_add_stmt (ilist, g);
6588 x = lang_hooks.decls.omp_clause_default_ctor
6589 (c, unshare_expr (new_var),
6590 cond ? NULL_TREE
6591 : build_outer_var_ref (var, ctx));
6592 if (x)
6593 gimplify_and_add (x, ilist);
6595 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6596 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6598 if (ctx->for_simd_scan_phase)
6599 goto do_dtor;
6600 if (x || (!is_simd
6601 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6603 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6604 gimple_add_tmp_var (nv);
6605 ctx->cb.decl_map->put (new_vard, nv);
6606 x = lang_hooks.decls.omp_clause_default_ctor
6607 (c, nv, build_outer_var_ref (var, ctx));
6608 if (x)
6609 gimplify_and_add (x, ilist);
6610 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6612 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6613 tree vexpr = nv;
6614 if (new_vard != new_var)
6615 vexpr = build_fold_addr_expr (nv);
6616 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6617 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6618 lower_omp (&tseq, ctx);
6619 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6620 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6621 gimple_seq_add_seq (ilist, tseq);
6623 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6624 if (is_simd && ctx->scan_exclusive)
6626 tree nv2
6627 = create_tmp_var_raw (TREE_TYPE (new_var));
6628 gimple_add_tmp_var (nv2);
6629 ctx->cb.decl_map->put (nv, nv2);
6630 x = lang_hooks.decls.omp_clause_default_ctor
6631 (c, nv2, build_outer_var_ref (var, ctx));
6632 gimplify_and_add (x, ilist);
6633 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6634 if (x)
6635 gimplify_and_add (x, dlist);
6637 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6638 if (x)
6639 gimplify_and_add (x, dlist);
6641 else if (is_simd
6642 && ctx->scan_exclusive
6643 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6645 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6646 gimple_add_tmp_var (nv2);
6647 ctx->cb.decl_map->put (new_vard, nv2);
6648 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6649 if (x)
6650 gimplify_and_add (x, dlist);
6652 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6653 goto do_dtor;
6656 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6658 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6659 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6660 && is_omp_target (ctx->stmt))
6662 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6663 tree oldv = NULL_TREE;
6664 gcc_assert (d);
6665 if (DECL_HAS_VALUE_EXPR_P (d))
6666 oldv = DECL_VALUE_EXPR (d);
6667 SET_DECL_VALUE_EXPR (d, new_vard);
6668 DECL_HAS_VALUE_EXPR_P (d) = 1;
6669 lower_omp (&tseq, ctx);
6670 if (oldv)
6671 SET_DECL_VALUE_EXPR (d, oldv);
6672 else
6674 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6675 DECL_HAS_VALUE_EXPR_P (d) = 0;
6678 else
6679 lower_omp (&tseq, ctx);
6680 gimple_seq_add_seq (ilist, tseq);
6682 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6683 if (is_simd)
6685 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6686 lower_omp (&tseq, ctx);
6687 gimple_seq_add_seq (dlist, tseq);
6688 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6690 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6691 if (cond)
6693 if (lab2)
6694 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6695 break;
6697 goto do_dtor;
6699 else
6701 x = omp_reduction_init (c, TREE_TYPE (new_var));
6702 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6703 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6705 if (cond)
6707 gimple *g;
6708 tree lab2 = NULL_TREE;
6709 /* GOMP_taskgroup_reduction_register memsets the whole
6710 array to zero. If the initializer is zero, we don't
6711 need to initialize it again, just mark it as ever
6712 used unconditionally, i.e. cond = true. */
6713 if (initializer_zerop (x))
6715 g = gimple_build_assign (build_simple_mem_ref (cond),
6716 boolean_true_node);
6717 gimple_seq_add_stmt (ilist, g);
6718 break;
6721 /* Otherwise, emit
6722 if (!cond) { cond = true; new_var = x; } */
6723 if (!is_parallel_ctx (ctx))
6725 tree condv = create_tmp_var (boolean_type_node);
6726 tree m = build_simple_mem_ref (cond);
6727 g = gimple_build_assign (condv, m);
6728 gimple_seq_add_stmt (ilist, g);
6729 tree lab1
6730 = create_artificial_label (UNKNOWN_LOCATION);
6731 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6732 g = gimple_build_cond (NE_EXPR, condv,
6733 boolean_false_node,
6734 lab2, lab1);
6735 gimple_seq_add_stmt (ilist, g);
6736 gimple_seq_add_stmt (ilist,
6737 gimple_build_label (lab1));
6739 g = gimple_build_assign (build_simple_mem_ref (cond),
6740 boolean_true_node);
6741 gimple_seq_add_stmt (ilist, g);
6742 gimplify_assign (new_var, x, ilist);
6743 if (lab2)
6744 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6745 break;
6748 /* reduction(-:var) sums up the partial results, so it
6749 acts identically to reduction(+:var). */
6750 if (code == MINUS_EXPR)
6751 code = PLUS_EXPR;
6753 bool is_truth_op
6754 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6755 tree new_vard = new_var;
6756 if (is_simd && omp_privatize_by_reference (var))
6758 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6759 new_vard = TREE_OPERAND (new_var, 0);
6760 gcc_assert (DECL_P (new_vard));
6762 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6763 if (is_simd
6764 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6765 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6766 rvarp = &rvar;
6767 if (is_simd
6768 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6769 ivar, lvar, rvarp,
6770 &rvar2))
6772 if (new_vard != new_var)
6774 SET_DECL_VALUE_EXPR (new_vard,
6775 build_fold_addr_expr (lvar));
6776 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6779 tree ref = build_outer_var_ref (var, ctx);
6781 if (rvarp)
6783 if (ctx->for_simd_scan_phase)
6784 break;
6785 gimplify_assign (ivar, ref, &llist[0]);
6786 ref = build_outer_var_ref (var, ctx);
6787 gimplify_assign (ref, rvar, &llist[3]);
6788 break;
6791 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6793 if (sctx.is_simt)
6795 if (!simt_lane)
6796 simt_lane = create_tmp_var (unsigned_type_node);
6797 x = build_call_expr_internal_loc
6798 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6799 TREE_TYPE (ivar), 2, ivar, simt_lane);
6800 /* Make sure x is evaluated unconditionally. */
6801 tree bfly_var = create_tmp_var (TREE_TYPE (ivar));
6802 gimplify_assign (bfly_var, x, &llist[2]);
6803 x = build2 (code, TREE_TYPE (ivar), ivar, bfly_var);
6804 gimplify_assign (ivar, x, &llist[2]);
6806 tree ivar2 = ivar;
6807 tree ref2 = ref;
6808 if (is_truth_op)
6810 tree zero = build_zero_cst (TREE_TYPE (ivar));
6811 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6812 boolean_type_node, ivar,
6813 zero);
6814 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6815 boolean_type_node, ref,
6816 zero);
6818 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6819 if (is_truth_op)
6820 x = fold_convert (TREE_TYPE (ref), x);
6821 ref = build_outer_var_ref (var, ctx);
6822 gimplify_assign (ref, x, &llist[1]);
6825 else
6827 lower_private_allocate (var, new_var, allocator,
6828 allocate_ptr, ilist, ctx,
6829 false, NULL_TREE);
6830 if (omp_privatize_by_reference (var) && is_simd)
6831 handle_simd_reference (clause_loc, new_vard, ilist);
6832 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6833 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6834 break;
6835 gimplify_assign (new_var, x, ilist);
6836 if (is_simd)
6838 tree ref = build_outer_var_ref (var, ctx);
6839 tree new_var2 = new_var;
6840 tree ref2 = ref;
6841 if (is_truth_op)
6843 tree zero = build_zero_cst (TREE_TYPE (new_var));
6844 new_var2
6845 = fold_build2_loc (clause_loc, NE_EXPR,
6846 boolean_type_node, new_var,
6847 zero);
6848 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6849 boolean_type_node, ref,
6850 zero);
6852 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6853 if (is_truth_op)
6854 x = fold_convert (TREE_TYPE (new_var), x);
6855 ref = build_outer_var_ref (var, ctx);
6856 gimplify_assign (ref, x, dlist);
6858 if (allocator)
6859 goto do_dtor;
6862 break;
6864 default:
6865 gcc_unreachable ();
6869 if (tskred_avar)
6871 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6872 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6875 if (known_eq (sctx.max_vf, 1U))
6877 sctx.is_simt = false;
6878 if (ctx->lastprivate_conditional_map)
6880 if (gimple_omp_for_combined_into_p (ctx->stmt))
6882 /* Signal to lower_omp_1 that it should use parent context. */
6883 ctx->combined_into_simd_safelen1 = true;
6884 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6885 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6886 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6888 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6889 omp_context *outer = ctx->outer;
6890 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6891 outer = outer->outer;
6892 tree *v = ctx->lastprivate_conditional_map->get (o);
6893 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6894 tree *pv = outer->lastprivate_conditional_map->get (po);
6895 *v = *pv;
6898 else
6900 /* When not vectorized, treat lastprivate(conditional:) like
6901 normal lastprivate, as there will be just one simd lane
6902 writing the privatized variable. */
6903 delete ctx->lastprivate_conditional_map;
6904 ctx->lastprivate_conditional_map = NULL;
6909 if (nonconst_simd_if)
6911 if (sctx.lane == NULL_TREE)
6913 sctx.idx = create_tmp_var (unsigned_type_node);
6914 sctx.lane = create_tmp_var (unsigned_type_node);
6916 /* FIXME: For now. */
6917 sctx.is_simt = false;
6920 if (sctx.lane || sctx.is_simt)
6922 uid = create_tmp_var (ptr_type_node, "simduid");
6923 /* Don't want uninit warnings on simduid, it is always uninitialized,
6924 but we use it not for the value, but for the DECL_UID only. */
6925 suppress_warning (uid, OPT_Wuninitialized);
6926 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6927 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6928 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6929 gimple_omp_for_set_clauses (ctx->stmt, c);
6931 /* Emit calls denoting privatized variables and initializing a pointer to
6932 structure that holds private variables as fields after ompdevlow pass. */
6933 if (sctx.is_simt)
6935 sctx.simt_eargs[0] = uid;
6936 gimple *g
6937 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6938 gimple_call_set_lhs (g, uid);
6939 gimple_seq_add_stmt (ilist, g);
6940 sctx.simt_eargs.release ();
6942 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6943 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6944 gimple_call_set_lhs (g, simtrec);
6945 gimple_seq_add_stmt (ilist, g);
6947 if (sctx.lane)
6949 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6950 2 + (nonconst_simd_if != NULL),
6951 uid, integer_zero_node,
6952 nonconst_simd_if);
6953 gimple_call_set_lhs (g, sctx.lane);
6954 gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6955 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6956 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6957 build_int_cst (unsigned_type_node, 0));
6958 gimple_seq_add_stmt (ilist, g);
6959 if (sctx.lastlane)
6961 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6962 2, uid, sctx.lane);
6963 gimple_call_set_lhs (g, sctx.lastlane);
6964 gimple_seq_add_stmt (dlist, g);
6965 gimple_seq_add_seq (dlist, llist[3]);
6967 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6968 if (llist[2])
6970 tree simt_vf = create_tmp_var (unsigned_type_node);
6971 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6972 gimple_call_set_lhs (g, simt_vf);
6973 gimple_seq_add_stmt (dlist, g);
6975 tree t = build_int_cst (unsigned_type_node, 1);
6976 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6977 gimple_seq_add_stmt (dlist, g);
6979 t = build_int_cst (unsigned_type_node, 0);
6980 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6981 gimple_seq_add_stmt (dlist, g);
6983 tree body = create_artificial_label (UNKNOWN_LOCATION);
6984 tree header = create_artificial_label (UNKNOWN_LOCATION);
6985 tree end = create_artificial_label (UNKNOWN_LOCATION);
6986 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6987 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6989 gimple_seq_add_seq (dlist, llist[2]);
6991 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6992 gimple_seq_add_stmt (dlist, g);
6994 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6995 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6996 gimple_seq_add_stmt (dlist, g);
6998 gimple_seq_add_stmt (dlist, gimple_build_label (end));
7000 for (int i = 0; i < 2; i++)
7001 if (llist[i])
7003 tree vf = create_tmp_var (unsigned_type_node);
7004 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
7005 gimple_call_set_lhs (g, vf);
7006 gimple_seq *seq = i == 0 ? ilist : dlist;
7007 gimple_seq_add_stmt (seq, g);
7008 tree t = build_int_cst (unsigned_type_node, 0);
7009 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
7010 gimple_seq_add_stmt (seq, g);
7011 tree body = create_artificial_label (UNKNOWN_LOCATION);
7012 tree header = create_artificial_label (UNKNOWN_LOCATION);
7013 tree end = create_artificial_label (UNKNOWN_LOCATION);
7014 gimple_seq_add_stmt (seq, gimple_build_goto (header));
7015 gimple_seq_add_stmt (seq, gimple_build_label (body));
7016 gimple_seq_add_seq (seq, llist[i]);
7017 t = build_int_cst (unsigned_type_node, 1);
7018 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
7019 gimple_seq_add_stmt (seq, g);
7020 gimple_seq_add_stmt (seq, gimple_build_label (header));
7021 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
7022 gimple_seq_add_stmt (seq, g);
7023 gimple_seq_add_stmt (seq, gimple_build_label (end));
7026 if (sctx.is_simt)
7028 gimple_seq_add_seq (dlist, sctx.simt_dlist);
7029 gimple *g
7030 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
7031 gimple_seq_add_stmt (dlist, g);
7034 /* The copyin sequence is not to be executed by the main thread, since
7035 that would result in self-copies. Perhaps not visible to scalars,
7036 but it certainly is to C++ operator=. */
7037 if (copyin_seq)
7039 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
7041 x = build2 (NE_EXPR, boolean_type_node, x,
7042 build_int_cst (TREE_TYPE (x), 0));
7043 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
7044 gimplify_and_add (x, ilist);
7047 /* If any copyin variable is passed by reference, we must ensure the
7048 master thread doesn't modify it before it is copied over in all
7049 threads. Similarly for variables in both firstprivate and
7050 lastprivate clauses we need to ensure the lastprivate copying
7051 happens after firstprivate copying in all threads. And similarly
7052 for UDRs if initializer expression refers to omp_orig. */
7053 if (copyin_by_ref || lastprivate_firstprivate
7054 || (reduction_omp_orig_ref
7055 && !ctx->scan_inclusive
7056 && !ctx->scan_exclusive))
7058 /* Don't add any barrier for #pragma omp simd or
7059 #pragma omp distribute. */
7060 if (!is_task_ctx (ctx)
7061 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
7062 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
7063 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
7066 /* If max_vf is non-zero, then we can use only a vectorization factor
7067 up to the max_vf we chose. So stick it into the safelen clause. */
7068 if (maybe_ne (sctx.max_vf, 0U))
7070 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
7071 OMP_CLAUSE_SAFELEN);
7072 poly_uint64 safe_len;
7073 if (c == NULL_TREE
7074 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
7075 && maybe_gt (safe_len, sctx.max_vf)))
7077 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
7078 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
7079 sctx.max_vf);
7080 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
7081 gimple_omp_for_set_clauses (ctx->stmt, c);
7086 /* Create temporary variables for lastprivate(conditional:) implementation
7087 in context CTX with CLAUSES. */
7089 static void
7090 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
7092 tree iter_type = NULL_TREE;
7093 tree cond_ptr = NULL_TREE;
7094 tree iter_var = NULL_TREE;
7095 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7096 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
7097 tree next = *clauses;
7098 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
7099 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7100 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
7102 if (is_simd)
7104 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
7105 gcc_assert (cc);
7106 if (iter_type == NULL_TREE)
7108 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
7109 iter_var = create_tmp_var_raw (iter_type);
7110 DECL_CONTEXT (iter_var) = current_function_decl;
7111 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7112 DECL_CHAIN (iter_var) = ctx->block_vars;
7113 ctx->block_vars = iter_var;
7114 tree c3
7115 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7116 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7117 OMP_CLAUSE_DECL (c3) = iter_var;
7118 OMP_CLAUSE_CHAIN (c3) = *clauses;
7119 *clauses = c3;
7120 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7122 next = OMP_CLAUSE_CHAIN (cc);
7123 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7124 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
7125 ctx->lastprivate_conditional_map->put (o, v);
7126 continue;
7128 if (iter_type == NULL)
7130 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
7132 struct omp_for_data fd;
7133 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
7134 NULL);
7135 iter_type = unsigned_type_for (fd.iter_type);
7137 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
7138 iter_type = unsigned_type_node;
7139 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
7140 if (c2)
7142 cond_ptr
7143 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
7144 OMP_CLAUSE_DECL (c2) = cond_ptr;
7146 else
7148 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
7149 DECL_CONTEXT (cond_ptr) = current_function_decl;
7150 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
7151 DECL_CHAIN (cond_ptr) = ctx->block_vars;
7152 ctx->block_vars = cond_ptr;
7153 c2 = build_omp_clause (UNKNOWN_LOCATION,
7154 OMP_CLAUSE__CONDTEMP_);
7155 OMP_CLAUSE_DECL (c2) = cond_ptr;
7156 OMP_CLAUSE_CHAIN (c2) = *clauses;
7157 *clauses = c2;
7159 iter_var = create_tmp_var_raw (iter_type);
7160 DECL_CONTEXT (iter_var) = current_function_decl;
7161 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7162 DECL_CHAIN (iter_var) = ctx->block_vars;
7163 ctx->block_vars = iter_var;
7164 tree c3
7165 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7166 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7167 OMP_CLAUSE_DECL (c3) = iter_var;
7168 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
7169 OMP_CLAUSE_CHAIN (c2) = c3;
7170 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7172 tree v = create_tmp_var_raw (iter_type);
7173 DECL_CONTEXT (v) = current_function_decl;
7174 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
7175 DECL_CHAIN (v) = ctx->block_vars;
7176 ctx->block_vars = v;
7177 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7178 ctx->lastprivate_conditional_map->put (o, v);
7183 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7184 both parallel and workshare constructs. PREDICATE may be NULL if it's
7185 always true. BODY_P is the sequence to insert early initialization
7186 if needed, STMT_LIST is where the non-conditional lastprivate handling
7187 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
7188 section. */
7190 static void
7191 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
7192 gimple_seq *stmt_list, gimple_seq *cstmt_list,
7193 omp_context *ctx)
7195 tree x, c, label = NULL, orig_clauses = clauses;
7196 bool par_clauses = false;
7197 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
7198 unsigned HOST_WIDE_INT conditional_off = 0;
7199 gimple_seq post_stmt_list = NULL;
7201 /* Early exit if there are no lastprivate or linear clauses. */
7202 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
7203 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
7204 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
7205 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
7206 break;
7207 if (clauses == NULL)
7209 /* If this was a workshare clause, see if it had been combined
7210 with its parallel. In that case, look for the clauses on the
7211 parallel statement itself. */
7212 if (is_parallel_ctx (ctx))
7213 return;
7215 ctx = ctx->outer;
7216 if (ctx == NULL || !is_parallel_ctx (ctx))
7217 return;
7219 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7220 OMP_CLAUSE_LASTPRIVATE);
7221 if (clauses == NULL)
7222 return;
7223 par_clauses = true;
7226 bool maybe_simt = false;
7227 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7228 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7230 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
7231 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
7232 if (simduid)
7233 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
7236 if (predicate)
7238 gcond *stmt;
7239 tree label_true, arm1, arm2;
7240 enum tree_code pred_code = TREE_CODE (predicate);
7242 label = create_artificial_label (UNKNOWN_LOCATION);
7243 label_true = create_artificial_label (UNKNOWN_LOCATION);
7244 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
7246 arm1 = TREE_OPERAND (predicate, 0);
7247 arm2 = TREE_OPERAND (predicate, 1);
7248 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7249 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
7251 else
7253 arm1 = predicate;
7254 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7255 arm2 = boolean_false_node;
7256 pred_code = NE_EXPR;
7258 if (maybe_simt)
7260 c = build2 (pred_code, boolean_type_node, arm1, arm2);
7261 c = fold_convert (integer_type_node, c);
7262 simtcond = create_tmp_var (integer_type_node);
7263 gimplify_assign (simtcond, c, stmt_list);
7264 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
7265 1, simtcond);
7266 c = create_tmp_var (integer_type_node);
7267 gimple_call_set_lhs (g, c);
7268 gimple_seq_add_stmt (stmt_list, g);
7269 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
7270 label_true, label);
7272 else
7273 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
7274 gimple_seq_add_stmt (stmt_list, stmt);
7275 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
7278 tree cond_ptr = NULL_TREE;
7279 for (c = clauses; c ;)
7281 tree var, new_var;
7282 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7283 gimple_seq *this_stmt_list = stmt_list;
7284 tree lab2 = NULL_TREE;
7286 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7287 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7288 && ctx->lastprivate_conditional_map
7289 && !ctx->combined_into_simd_safelen1)
7291 gcc_assert (body_p);
7292 if (simduid)
7293 goto next;
7294 if (cond_ptr == NULL_TREE)
7296 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
7297 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
7299 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
7300 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7301 tree v = *ctx->lastprivate_conditional_map->get (o);
7302 gimplify_assign (v, build_zero_cst (type), body_p);
7303 this_stmt_list = cstmt_list;
7304 tree mem;
7305 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
7307 mem = build2 (MEM_REF, type, cond_ptr,
7308 build_int_cst (TREE_TYPE (cond_ptr),
7309 conditional_off));
7310 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
7312 else
7313 mem = build4 (ARRAY_REF, type, cond_ptr,
7314 size_int (conditional_off++), NULL_TREE, NULL_TREE);
7315 tree mem2 = copy_node (mem);
7316 gimple_seq seq = NULL;
7317 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
7318 gimple_seq_add_seq (this_stmt_list, seq);
7319 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7320 lab2 = create_artificial_label (UNKNOWN_LOCATION);
7321 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
7322 gimple_seq_add_stmt (this_stmt_list, g);
7323 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
7324 gimplify_assign (mem2, v, this_stmt_list);
7326 else if (predicate
7327 && ctx->combined_into_simd_safelen1
7328 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7329 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7330 && ctx->lastprivate_conditional_map)
7331 this_stmt_list = &post_stmt_list;
7333 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7334 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7335 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7337 var = OMP_CLAUSE_DECL (c);
7338 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7339 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7340 && is_taskloop_ctx (ctx))
7342 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
7343 new_var = lookup_decl (var, ctx->outer);
7345 else
7347 new_var = lookup_decl (var, ctx);
7348 /* Avoid uninitialized warnings for lastprivate and
7349 for linear iterators. */
7350 if (predicate
7351 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7352 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
7353 suppress_warning (new_var, OPT_Wuninitialized);
7356 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
7358 tree val = DECL_VALUE_EXPR (new_var);
7359 if (TREE_CODE (val) == ARRAY_REF
7360 && VAR_P (TREE_OPERAND (val, 0))
7361 && lookup_attribute ("omp simd array",
7362 DECL_ATTRIBUTES (TREE_OPERAND (val,
7363 0))))
7365 if (lastlane == NULL)
7367 lastlane = create_tmp_var (unsigned_type_node);
7368 gcall *g
7369 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
7370 2, simduid,
7371 TREE_OPERAND (val, 1));
7372 gimple_call_set_lhs (g, lastlane);
7373 gimple_seq_add_stmt (this_stmt_list, g);
7375 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
7376 TREE_OPERAND (val, 0), lastlane,
7377 NULL_TREE, NULL_TREE);
7378 TREE_THIS_NOTRAP (new_var) = 1;
7381 else if (maybe_simt)
7383 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7384 ? DECL_VALUE_EXPR (new_var)
7385 : new_var);
7386 if (simtlast == NULL)
7388 simtlast = create_tmp_var (unsigned_type_node);
7389 gcall *g = gimple_build_call_internal
7390 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7391 gimple_call_set_lhs (g, simtlast);
7392 gimple_seq_add_stmt (this_stmt_list, g);
7394 x = build_call_expr_internal_loc
7395 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7396 TREE_TYPE (val), 2, val, simtlast);
7397 new_var = unshare_expr (new_var);
7398 gimplify_assign (new_var, x, this_stmt_list);
7399 new_var = unshare_expr (new_var);
7402 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7403 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7405 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7406 gimple_seq_add_seq (this_stmt_list,
7407 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7408 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7410 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7411 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7413 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7414 gimple_seq_add_seq (this_stmt_list,
7415 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7416 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
7419 x = NULL_TREE;
7420 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7421 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7422 && is_taskloop_ctx (ctx))
7424 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7425 ctx->outer->outer);
7426 if (is_global_var (ovar))
7427 x = ovar;
7429 if (!x)
7430 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7431 if (omp_privatize_by_reference (var))
7432 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7433 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7434 gimplify_and_add (x, this_stmt_list);
7436 if (lab2)
7437 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7440 next:
7441 c = OMP_CLAUSE_CHAIN (c);
7442 if (c == NULL && !par_clauses)
7444 /* If this was a workshare clause, see if it had been combined
7445 with its parallel. In that case, continue looking for the
7446 clauses also on the parallel statement itself. */
7447 if (is_parallel_ctx (ctx))
7448 break;
7450 ctx = ctx->outer;
7451 if (ctx == NULL || !is_parallel_ctx (ctx))
7452 break;
7454 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7455 OMP_CLAUSE_LASTPRIVATE);
7456 par_clauses = true;
7460 if (label)
7461 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7462 gimple_seq_add_seq (stmt_list, post_stmt_list);
7465 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7466 (which might be a placeholder). INNER is true if this is an inner
7467 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7468 join markers. Generate the before-loop forking sequence in
7469 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7470 general form of these sequences is
7472 GOACC_REDUCTION_SETUP
7473 GOACC_FORK
7474 GOACC_REDUCTION_INIT
7476 GOACC_REDUCTION_FINI
7477 GOACC_JOIN
7478 GOACC_REDUCTION_TEARDOWN. */
7480 static void
7481 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7482 gcall *fork, gcall *private_marker, gcall *join,
7483 gimple_seq *fork_seq, gimple_seq *join_seq,
7484 omp_context *ctx)
7486 gimple_seq before_fork = NULL;
7487 gimple_seq after_fork = NULL;
7488 gimple_seq before_join = NULL;
7489 gimple_seq after_join = NULL;
7490 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7491 setup_code = NULL_TREE, teardown_code = NULL_TREE;
7492 unsigned offset = 0;
7494 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7495 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7497 /* No 'reduction' clauses on OpenACC 'kernels'. */
7498 gcc_checking_assert (!is_oacc_kernels (ctx));
7499 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7500 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7502 tree orig = OMP_CLAUSE_DECL (c);
7503 tree var = maybe_lookup_decl (orig, ctx);
7504 tree ref_to_res = NULL_TREE;
7505 tree incoming, outgoing, v1, v2, v3;
7506 bool is_private = false;
7508 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7509 if (rcode == MINUS_EXPR)
7510 rcode = PLUS_EXPR;
7511 else if (rcode == TRUTH_ANDIF_EXPR)
7512 rcode = BIT_AND_EXPR;
7513 else if (rcode == TRUTH_ORIF_EXPR)
7514 rcode = BIT_IOR_EXPR;
7515 tree op = build_int_cst (unsigned_type_node, rcode);
7517 if (!var)
7518 var = orig;
7520 incoming = outgoing = var;
7522 if (!inner)
7524 /* See if an outer construct also reduces this variable. */
7525 omp_context *outer = ctx;
7527 while (omp_context *probe = outer->outer)
7529 enum gimple_code type = gimple_code (probe->stmt);
7530 tree cls;
7532 switch (type)
7534 case GIMPLE_OMP_FOR:
7535 cls = gimple_omp_for_clauses (probe->stmt);
7536 break;
7538 case GIMPLE_OMP_TARGET:
7539 /* No 'reduction' clauses inside OpenACC 'kernels'
7540 regions. */
7541 gcc_checking_assert (!is_oacc_kernels (probe));
7543 if (!is_gimple_omp_offloaded (probe->stmt))
7544 goto do_lookup;
7546 cls = gimple_omp_target_clauses (probe->stmt);
7547 break;
7549 default:
7550 goto do_lookup;
7553 outer = probe;
7554 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7555 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7556 && orig == OMP_CLAUSE_DECL (cls))
7558 incoming = outgoing = lookup_decl (orig, probe);
7559 goto has_outer_reduction;
7561 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7562 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7563 && orig == OMP_CLAUSE_DECL (cls))
7565 is_private = true;
7566 goto do_lookup;
7570 do_lookup:
7571 /* This is the outermost construct with this reduction,
7572 see if there's a mapping for it. */
7573 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7574 && maybe_lookup_field (orig, outer) && !is_private)
7576 ref_to_res = build_receiver_ref (orig, false, outer);
7577 if (omp_privatize_by_reference (orig))
7578 ref_to_res = build_simple_mem_ref (ref_to_res);
7580 tree type = TREE_TYPE (var);
7581 if (POINTER_TYPE_P (type))
7582 type = TREE_TYPE (type);
7584 outgoing = var;
7585 incoming = omp_reduction_init_op (loc, rcode, type);
7587 else
7589 /* Try to look at enclosing contexts for reduction var,
7590 use original if no mapping found. */
7591 tree t = NULL_TREE;
7592 omp_context *c = ctx->outer;
7593 while (c && !t)
7595 t = maybe_lookup_decl (orig, c);
7596 c = c->outer;
7598 incoming = outgoing = (t ? t : orig);
7601 has_outer_reduction:;
7604 if (!ref_to_res)
7605 ref_to_res = integer_zero_node;
7607 if (omp_privatize_by_reference (orig))
7609 tree type = TREE_TYPE (var);
7610 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7612 if (!inner)
7614 tree x = create_tmp_var (TREE_TYPE (type), id);
7615 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7618 v1 = create_tmp_var (type, id);
7619 v2 = create_tmp_var (type, id);
7620 v3 = create_tmp_var (type, id);
7622 gimplify_assign (v1, var, fork_seq);
7623 gimplify_assign (v2, var, fork_seq);
7624 gimplify_assign (v3, var, fork_seq);
7626 var = build_simple_mem_ref (var);
7627 v1 = build_simple_mem_ref (v1);
7628 v2 = build_simple_mem_ref (v2);
7629 v3 = build_simple_mem_ref (v3);
7630 outgoing = build_simple_mem_ref (outgoing);
7632 if (!TREE_CONSTANT (incoming))
7633 incoming = build_simple_mem_ref (incoming);
7635 else
7636 /* Note that 'var' might be a mem ref. */
7637 v1 = v2 = v3 = var;
7639 /* Determine position in reduction buffer, which may be used
7640 by target. The parser has ensured that this is not a
7641 variable-sized type. */
7642 fixed_size_mode mode
7643 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7644 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7645 offset = (offset + align - 1) & ~(align - 1);
7646 tree off = build_int_cst (sizetype, offset);
7647 offset += GET_MODE_SIZE (mode);
7649 if (!init_code)
7651 init_code = build_int_cst (integer_type_node,
7652 IFN_GOACC_REDUCTION_INIT);
7653 fini_code = build_int_cst (integer_type_node,
7654 IFN_GOACC_REDUCTION_FINI);
7655 setup_code = build_int_cst (integer_type_node,
7656 IFN_GOACC_REDUCTION_SETUP);
7657 teardown_code = build_int_cst (integer_type_node,
7658 IFN_GOACC_REDUCTION_TEARDOWN);
7661 tree setup_call
7662 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7663 TREE_TYPE (var), 6, setup_code,
7664 unshare_expr (ref_to_res),
7665 unshare_expr (incoming),
7666 level, op, off);
7667 tree init_call
7668 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7669 TREE_TYPE (var), 6, init_code,
7670 unshare_expr (ref_to_res),
7671 unshare_expr (v1), level, op, off);
7672 tree fini_call
7673 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7674 TREE_TYPE (var), 6, fini_code,
7675 unshare_expr (ref_to_res),
7676 unshare_expr (v2), level, op, off);
7677 tree teardown_call
7678 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7679 TREE_TYPE (var), 6, teardown_code,
7680 ref_to_res, unshare_expr (v3),
7681 level, op, off);
7683 gimplify_assign (unshare_expr (v1), setup_call, &before_fork);
7684 gimplify_assign (unshare_expr (v2), init_call, &after_fork);
7685 gimplify_assign (unshare_expr (v3), fini_call, &before_join);
7686 gimplify_assign (unshare_expr (outgoing), teardown_call, &after_join);
7689 /* Now stitch things together. */
7690 gimple_seq_add_seq (fork_seq, before_fork);
7691 if (private_marker)
7692 gimple_seq_add_stmt (fork_seq, private_marker);
7693 if (fork)
7694 gimple_seq_add_stmt (fork_seq, fork);
7695 gimple_seq_add_seq (fork_seq, after_fork);
7697 gimple_seq_add_seq (join_seq, before_join);
7698 if (join)
7699 gimple_seq_add_stmt (join_seq, join);
7700 gimple_seq_add_seq (join_seq, after_join);
7703 /* Generate code to implement the REDUCTION clauses, append it
7704 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7705 that should be emitted also inside of the critical section,
7706 in that case clear *CLIST afterwards, otherwise leave it as is
7707 and let the caller emit it itself. */
7709 static void
7710 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7711 gimple_seq *clist, omp_context *ctx)
7713 gimple_seq sub_seq = NULL;
7714 gimple *stmt;
7715 tree x, c;
7716 int count = 0;
7718 /* OpenACC loop reductions are handled elsewhere. */
7719 if (is_gimple_omp_oacc (ctx->stmt))
7720 return;
7722 /* SIMD reductions are handled in lower_rec_input_clauses. */
7723 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7724 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7725 return;
7727 /* inscan reductions are handled elsewhere. */
7728 if (ctx->scan_inclusive || ctx->scan_exclusive)
7729 return;
7731 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7732 update in that case, otherwise use a lock. */
7733 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7734 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7735 && !OMP_CLAUSE_REDUCTION_TASK (c))
7737 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7738 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7740 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7741 count = -1;
7742 break;
7744 count++;
7747 if (count == 0)
7748 return;
7750 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7752 tree var, ref, new_var, orig_var;
7753 enum tree_code code;
7754 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7756 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7757 || OMP_CLAUSE_REDUCTION_TASK (c))
7758 continue;
7760 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7761 orig_var = var = OMP_CLAUSE_DECL (c);
7762 if (TREE_CODE (var) == MEM_REF)
7764 var = TREE_OPERAND (var, 0);
7765 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7766 var = TREE_OPERAND (var, 0);
7767 if (TREE_CODE (var) == ADDR_EXPR)
7768 var = TREE_OPERAND (var, 0);
7769 else
7771 /* If this is a pointer or referenced based array
7772 section, the var could be private in the outer
7773 context e.g. on orphaned loop construct. Pretend this
7774 is private variable's outer reference. */
7775 ccode = OMP_CLAUSE_PRIVATE;
7776 if (TREE_CODE (var) == INDIRECT_REF)
7777 var = TREE_OPERAND (var, 0);
7779 orig_var = var;
7780 if (is_variable_sized (var))
7782 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7783 var = DECL_VALUE_EXPR (var);
7784 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7785 var = TREE_OPERAND (var, 0);
7786 gcc_assert (DECL_P (var));
7789 new_var = lookup_decl (var, ctx);
7790 if (var == OMP_CLAUSE_DECL (c)
7791 && omp_privatize_by_reference (var))
7792 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7793 ref = build_outer_var_ref (var, ctx, ccode);
7794 code = OMP_CLAUSE_REDUCTION_CODE (c);
7796 /* reduction(-:var) sums up the partial results, so it acts
7797 identically to reduction(+:var). */
7798 if (code == MINUS_EXPR)
7799 code = PLUS_EXPR;
7801 bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
7802 if (count == 1)
7804 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7806 addr = save_expr (addr);
7807 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7808 tree new_var2 = new_var;
7809 tree ref2 = ref;
7810 if (is_truth_op)
7812 tree zero = build_zero_cst (TREE_TYPE (new_var));
7813 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7814 boolean_type_node, new_var, zero);
7815 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7816 ref, zero);
7818 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7819 new_var2);
7820 if (is_truth_op)
7821 x = fold_convert (TREE_TYPE (new_var), x);
7822 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7823 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7824 gimplify_and_add (x, stmt_seqp);
7825 return;
7827 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7829 tree d = OMP_CLAUSE_DECL (c);
7830 tree type = TREE_TYPE (d);
7831 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7832 tree i = create_tmp_var (TREE_TYPE (v));
7833 tree ptype = build_pointer_type (TREE_TYPE (type));
7834 tree bias = TREE_OPERAND (d, 1);
7835 d = TREE_OPERAND (d, 0);
7836 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7838 tree b = TREE_OPERAND (d, 1);
7839 b = maybe_lookup_decl (b, ctx);
7840 if (b == NULL)
7842 b = TREE_OPERAND (d, 1);
7843 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7845 if (integer_zerop (bias))
7846 bias = b;
7847 else
7849 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7850 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7851 TREE_TYPE (b), b, bias);
7853 d = TREE_OPERAND (d, 0);
7855 /* For ref build_outer_var_ref already performs this, so
7856 only new_var needs a dereference. */
7857 if (TREE_CODE (d) == INDIRECT_REF)
7859 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7860 gcc_assert (omp_privatize_by_reference (var)
7861 && var == orig_var);
7863 else if (TREE_CODE (d) == ADDR_EXPR)
7865 if (orig_var == var)
7867 new_var = build_fold_addr_expr (new_var);
7868 ref = build_fold_addr_expr (ref);
7871 else
7873 gcc_assert (orig_var == var);
7874 if (omp_privatize_by_reference (var))
7875 ref = build_fold_addr_expr (ref);
7877 if (DECL_P (v))
7879 tree t = maybe_lookup_decl (v, ctx);
7880 if (t)
7881 v = t;
7882 else
7883 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7884 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7886 if (!integer_zerop (bias))
7888 bias = fold_convert_loc (clause_loc, sizetype, bias);
7889 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7890 TREE_TYPE (new_var), new_var,
7891 unshare_expr (bias));
7892 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7893 TREE_TYPE (ref), ref, bias);
7895 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7896 ref = fold_convert_loc (clause_loc, ptype, ref);
7897 tree m = create_tmp_var (ptype);
7898 gimplify_assign (m, new_var, stmt_seqp);
7899 new_var = m;
7900 m = create_tmp_var (ptype);
7901 gimplify_assign (m, ref, stmt_seqp);
7902 ref = m;
7903 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7904 tree body = create_artificial_label (UNKNOWN_LOCATION);
7905 tree end = create_artificial_label (UNKNOWN_LOCATION);
7906 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7907 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7908 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7909 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7911 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7912 tree decl_placeholder
7913 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7914 SET_DECL_VALUE_EXPR (placeholder, out);
7915 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7916 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7917 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7918 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7919 gimple_seq_add_seq (&sub_seq,
7920 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7921 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7922 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7923 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7925 else
7927 tree out2 = out;
7928 tree priv2 = priv;
7929 if (is_truth_op)
7931 tree zero = build_zero_cst (TREE_TYPE (out));
7932 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7933 boolean_type_node, out, zero);
7934 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7935 boolean_type_node, priv, zero);
7937 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7938 if (is_truth_op)
7939 x = fold_convert (TREE_TYPE (out), x);
7940 out = unshare_expr (out);
7941 gimplify_assign (out, x, &sub_seq);
7943 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7944 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7945 gimple_seq_add_stmt (&sub_seq, g);
7946 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7947 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7948 gimple_seq_add_stmt (&sub_seq, g);
7949 g = gimple_build_assign (i, PLUS_EXPR, i,
7950 build_int_cst (TREE_TYPE (i), 1));
7951 gimple_seq_add_stmt (&sub_seq, g);
7952 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7953 gimple_seq_add_stmt (&sub_seq, g);
7954 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7956 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7958 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7960 if (omp_privatize_by_reference (var)
7961 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7962 TREE_TYPE (ref)))
7963 ref = build_fold_addr_expr_loc (clause_loc, ref);
7964 SET_DECL_VALUE_EXPR (placeholder, ref);
7965 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7966 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7967 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7968 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7969 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7971 else
7973 tree new_var2 = new_var;
7974 tree ref2 = ref;
7975 if (is_truth_op)
7977 tree zero = build_zero_cst (TREE_TYPE (new_var));
7978 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7979 boolean_type_node, new_var, zero);
7980 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7981 ref, zero);
7983 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7984 if (is_truth_op)
7985 x = fold_convert (TREE_TYPE (new_var), x);
7986 ref = build_outer_var_ref (var, ctx);
7987 gimplify_assign (ref, x, &sub_seq);
7991 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7993 gimple_seq_add_stmt (stmt_seqp, stmt);
7995 gimple_seq_add_seq (stmt_seqp, sub_seq);
7997 if (clist)
7999 gimple_seq_add_seq (stmt_seqp, *clist);
8000 *clist = NULL;
8003 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
8005 gimple_seq_add_stmt (stmt_seqp, stmt);
8009 /* Generate code to implement the COPYPRIVATE clauses. */
8011 static void
8012 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
8013 omp_context *ctx)
8015 tree c;
8017 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8019 tree var, new_var, ref, x;
8020 bool by_ref;
8021 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8023 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
8024 continue;
8026 var = OMP_CLAUSE_DECL (c);
8027 by_ref = use_pointer_for_field (var, NULL);
8029 ref = build_sender_ref (var, ctx);
8030 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
8031 if (by_ref)
8033 x = build_fold_addr_expr_loc (clause_loc, new_var);
8034 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
8036 gimplify_assign (ref, x, slist);
8038 ref = build_receiver_ref (var, false, ctx);
8039 if (by_ref)
8041 ref = fold_convert_loc (clause_loc,
8042 build_pointer_type (TREE_TYPE (new_var)),
8043 ref);
8044 ref = build_fold_indirect_ref_loc (clause_loc, ref);
8046 if (omp_privatize_by_reference (var))
8048 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
8049 ref = build_simple_mem_ref_loc (clause_loc, ref);
8050 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
8052 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
8053 gimplify_and_add (x, rlist);
8058 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
8059 and REDUCTION from the sender (aka parent) side. */
8061 static void
8062 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
8063 omp_context *ctx)
8065 tree c, t;
8066 int ignored_looptemp = 0;
8067 bool is_taskloop = false;
8069 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
8070 by GOMP_taskloop. */
8071 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
8073 ignored_looptemp = 2;
8074 is_taskloop = true;
8077 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8079 tree val, ref, x, var;
8080 bool by_ref, do_in = false, do_out = false;
8081 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8083 switch (OMP_CLAUSE_CODE (c))
8085 case OMP_CLAUSE_PRIVATE:
8086 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
8087 break;
8088 continue;
8089 case OMP_CLAUSE_FIRSTPRIVATE:
8090 case OMP_CLAUSE_COPYIN:
8091 case OMP_CLAUSE_LASTPRIVATE:
8092 case OMP_CLAUSE_IN_REDUCTION:
8093 case OMP_CLAUSE__REDUCTEMP_:
8094 break;
8095 case OMP_CLAUSE_REDUCTION:
8096 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
8097 continue;
8098 break;
8099 case OMP_CLAUSE_SHARED:
8100 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8101 break;
8102 continue;
8103 case OMP_CLAUSE__LOOPTEMP_:
8104 if (ignored_looptemp)
8106 ignored_looptemp--;
8107 continue;
8109 break;
8110 default:
8111 continue;
8114 val = OMP_CLAUSE_DECL (c);
8115 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8116 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
8117 && TREE_CODE (val) == MEM_REF)
8119 val = TREE_OPERAND (val, 0);
8120 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
8121 val = TREE_OPERAND (val, 0);
8122 if (TREE_CODE (val) == INDIRECT_REF
8123 || TREE_CODE (val) == ADDR_EXPR)
8124 val = TREE_OPERAND (val, 0);
8125 if (is_variable_sized (val))
8126 continue;
8129 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
8130 outer taskloop region. */
8131 omp_context *ctx_for_o = ctx;
8132 if (is_taskloop
8133 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8134 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8135 ctx_for_o = ctx->outer;
8137 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
8139 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
8140 && is_global_var (var)
8141 && (val == OMP_CLAUSE_DECL (c)
8142 || !is_task_ctx (ctx)
8143 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
8144 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
8145 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
8146 != POINTER_TYPE)))))
8147 continue;
8149 t = omp_member_access_dummy_var (var);
8150 if (t)
8152 var = DECL_VALUE_EXPR (var);
8153 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
8154 if (o != t)
8155 var = unshare_and_remap (var, t, o);
8156 else
8157 var = unshare_expr (var);
8160 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
8162 /* Handle taskloop firstprivate/lastprivate, where the
8163 lastprivate on GIMPLE_OMP_TASK is represented as
8164 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8165 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
8166 x = omp_build_component_ref (ctx->sender_decl, f);
8167 if (use_pointer_for_field (val, ctx))
8168 var = build_fold_addr_expr (var);
8169 gimplify_assign (x, var, ilist);
8170 DECL_ABSTRACT_ORIGIN (f) = NULL;
8171 continue;
8174 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8175 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
8176 || val == OMP_CLAUSE_DECL (c))
8177 && is_variable_sized (val))
8178 continue;
8179 by_ref = use_pointer_for_field (val, NULL);
8181 switch (OMP_CLAUSE_CODE (c))
8183 case OMP_CLAUSE_FIRSTPRIVATE:
8184 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
8185 && !by_ref
8186 && is_task_ctx (ctx))
8187 suppress_warning (var);
8188 do_in = true;
8189 break;
8191 case OMP_CLAUSE_PRIVATE:
8192 case OMP_CLAUSE_COPYIN:
8193 case OMP_CLAUSE__LOOPTEMP_:
8194 case OMP_CLAUSE__REDUCTEMP_:
8195 do_in = true;
8196 break;
8198 case OMP_CLAUSE_LASTPRIVATE:
8199 if (by_ref || omp_privatize_by_reference (val))
8201 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8202 continue;
8203 do_in = true;
8205 else
8207 do_out = true;
8208 if (lang_hooks.decls.omp_private_outer_ref (val))
8209 do_in = true;
8211 break;
8213 case OMP_CLAUSE_REDUCTION:
8214 case OMP_CLAUSE_IN_REDUCTION:
8215 do_in = true;
8216 if (val == OMP_CLAUSE_DECL (c))
8218 if (is_task_ctx (ctx))
8219 by_ref = use_pointer_for_field (val, ctx);
8220 else
8221 do_out = !(by_ref || omp_privatize_by_reference (val));
8223 else
8224 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
8225 break;
8227 default:
8228 gcc_unreachable ();
8231 if (do_in)
8233 ref = build_sender_ref (val, ctx);
8234 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
8235 gimplify_assign (ref, x, ilist);
8236 if (is_task_ctx (ctx))
8237 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
8240 if (do_out)
8242 ref = build_sender_ref (val, ctx);
8243 gimplify_assign (var, ref, olist);
8248 /* Generate code to implement SHARED from the sender (aka parent)
8249 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8250 list things that got automatically shared. */
8252 static void
8253 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
8255 tree var, ovar, nvar, t, f, x, record_type;
8257 if (ctx->record_type == NULL)
8258 return;
8260 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
8261 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8263 ovar = DECL_ABSTRACT_ORIGIN (f);
8264 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
8265 continue;
8267 nvar = maybe_lookup_decl (ovar, ctx);
8268 if (!nvar
8269 || !DECL_HAS_VALUE_EXPR_P (nvar)
8270 || (ctx->allocate_map
8271 && ctx->allocate_map->get (ovar)))
8272 continue;
8274 /* If CTX is a nested parallel directive. Find the immediately
8275 enclosing parallel or workshare construct that contains a
8276 mapping for OVAR. */
8277 var = lookup_decl_in_outer_ctx (ovar, ctx);
8279 t = omp_member_access_dummy_var (var);
8280 if (t)
8282 var = DECL_VALUE_EXPR (var);
8283 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
8284 if (o != t)
8285 var = unshare_and_remap (var, t, o);
8286 else
8287 var = unshare_expr (var);
8290 if (use_pointer_for_field (ovar, ctx))
8292 x = build_sender_ref (ovar, ctx);
8293 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
8294 && TREE_TYPE (f) == TREE_TYPE (ovar))
8296 gcc_assert (is_parallel_ctx (ctx)
8297 && DECL_ARTIFICIAL (ovar));
8298 /* _condtemp_ clause. */
8299 var = build_constructor (TREE_TYPE (x), NULL);
8301 else
8302 var = build_fold_addr_expr (var);
8303 gimplify_assign (x, var, ilist);
8305 else
8307 x = build_sender_ref (ovar, ctx);
8308 gimplify_assign (x, var, ilist);
8310 if (!TREE_READONLY (var)
8311 /* We don't need to receive a new reference to a result
8312 or parm decl. In fact we may not store to it as we will
8313 invalidate any pending RSO and generate wrong gimple
8314 during inlining. */
8315 && !((TREE_CODE (var) == RESULT_DECL
8316 || TREE_CODE (var) == PARM_DECL)
8317 && DECL_BY_REFERENCE (var)))
8319 x = build_sender_ref (ovar, ctx);
8320 gimplify_assign (var, x, olist);
8326 /* Emit an OpenACC head marker call, encapulating the partitioning and
8327 other information that must be processed by the target compiler.
8328 Return the maximum number of dimensions the associated loop might
8329 be partitioned over. */
8331 static unsigned
8332 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
8333 gimple_seq *seq, omp_context *ctx)
8335 unsigned levels = 0;
8336 unsigned tag = 0;
8337 tree gang_static = NULL_TREE;
8338 auto_vec<tree, 5> args;
8340 args.quick_push (build_int_cst
8341 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
8342 args.quick_push (ddvar);
8343 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8345 switch (OMP_CLAUSE_CODE (c))
8347 case OMP_CLAUSE_GANG:
8348 tag |= OLF_DIM_GANG;
8349 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
8350 /* static:* is represented by -1, and we can ignore it, as
8351 scheduling is always static. */
8352 if (gang_static && integer_minus_onep (gang_static))
8353 gang_static = NULL_TREE;
8354 levels++;
8355 break;
8357 case OMP_CLAUSE_WORKER:
8358 tag |= OLF_DIM_WORKER;
8359 levels++;
8360 break;
8362 case OMP_CLAUSE_VECTOR:
8363 tag |= OLF_DIM_VECTOR;
8364 levels++;
8365 break;
8367 case OMP_CLAUSE_SEQ:
8368 tag |= OLF_SEQ;
8369 break;
8371 case OMP_CLAUSE_AUTO:
8372 tag |= OLF_AUTO;
8373 break;
8375 case OMP_CLAUSE_INDEPENDENT:
8376 tag |= OLF_INDEPENDENT;
8377 break;
8379 case OMP_CLAUSE_TILE:
8380 tag |= OLF_TILE;
8381 break;
8383 case OMP_CLAUSE_REDUCTION:
8384 tag |= OLF_REDUCTION;
8385 break;
8387 default:
8388 continue;
8392 if (gang_static)
8394 if (DECL_P (gang_static))
8395 gang_static = build_outer_var_ref (gang_static, ctx);
8396 tag |= OLF_GANG_STATIC;
8399 omp_context *tgt = enclosing_target_ctx (ctx);
8400 if (!tgt || is_oacc_parallel_or_serial (tgt))
8402 else if (is_oacc_kernels (tgt))
8403 /* Not using this loops handling inside OpenACC 'kernels' regions. */
8404 gcc_unreachable ();
8405 else if (is_oacc_kernels_decomposed_part (tgt))
8407 else
8408 gcc_unreachable ();
8410 /* In a parallel region, loops are implicitly INDEPENDENT. */
8411 if (!tgt || is_oacc_parallel_or_serial (tgt))
8412 tag |= OLF_INDEPENDENT;
8414 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8415 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8416 if (tgt && is_oacc_kernels_decomposed_part (tgt))
8418 gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
8419 gcc_assert (!(tag & OLF_AUTO));
8422 if (tag & OLF_TILE)
8423 /* Tiling could use all 3 levels. */
8424 levels = 3;
8425 else
8427 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8428 Ensure at least one level, or 2 for possible auto
8429 partitioning */
8430 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
8431 << OLF_DIM_BASE) | OLF_SEQ));
8433 if (levels < 1u + maybe_auto)
8434 levels = 1u + maybe_auto;
8437 args.quick_push (build_int_cst (integer_type_node, levels));
8438 args.quick_push (build_int_cst (integer_type_node, tag));
8439 if (gang_static)
8440 args.quick_push (gang_static);
8442 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
8443 gimple_set_location (call, loc);
8444 gimple_set_lhs (call, ddvar);
8445 gimple_seq_add_stmt (seq, call);
8447 return levels;
8450 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
8451 partitioning level of the enclosed region. */
8453 static void
8454 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8455 tree tofollow, gimple_seq *seq)
8457 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8458 : IFN_UNIQUE_OACC_TAIL_MARK);
8459 tree marker = build_int_cst (integer_type_node, marker_kind);
8460 int nargs = 2 + (tofollow != NULL_TREE);
8461 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8462 marker, ddvar, tofollow);
8463 gimple_set_location (call, loc);
8464 gimple_set_lhs (call, ddvar);
8465 gimple_seq_add_stmt (seq, call);
8468 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8469 the loop clauses, from which we extract reductions. Initialize
8470 HEAD and TAIL. */
8472 static void
8473 lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
8474 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
8476 bool inner = false;
8477 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
8478 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
8480 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
8482 if (private_marker)
8484 gimple_set_location (private_marker, loc);
8485 gimple_call_set_lhs (private_marker, ddvar);
8486 gimple_call_set_arg (private_marker, 1, ddvar);
8489 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
8490 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
8492 gcc_assert (count);
8493 for (unsigned done = 1; count; count--, done++)
8495 gimple_seq fork_seq = NULL;
8496 gimple_seq join_seq = NULL;
8498 tree place = build_int_cst (integer_type_node, -1);
8499 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
8500 fork_kind, ddvar, place);
8501 gimple_set_location (fork, loc);
8502 gimple_set_lhs (fork, ddvar);
8504 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
8505 join_kind, ddvar, place);
8506 gimple_set_location (join, loc);
8507 gimple_set_lhs (join, ddvar);
8509 /* Mark the beginning of this level sequence. */
8510 if (inner)
8511 lower_oacc_loop_marker (loc, ddvar, true,
8512 build_int_cst (integer_type_node, count),
8513 &fork_seq);
8514 lower_oacc_loop_marker (loc, ddvar, false,
8515 build_int_cst (integer_type_node, done),
8516 &join_seq);
8518 lower_oacc_reductions (loc, clauses, place, inner,
8519 fork, (count == 1) ? private_marker : NULL,
8520 join, &fork_seq, &join_seq, ctx);
8522 /* Append this level to head. */
8523 gimple_seq_add_seq (head, fork_seq);
8524 /* Prepend it to tail. */
8525 gimple_seq_add_seq (&join_seq, *tail);
8526 *tail = join_seq;
8528 inner = true;
8531 /* Mark the end of the sequence. */
8532 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
8533 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
8536 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8537 catch handler and return it. This prevents programs from violating the
8538 structured block semantics with throws. */
8540 static gimple_seq
8541 maybe_catch_exception (gimple_seq body)
8543 gimple *g;
8544 tree decl;
8546 if (!flag_exceptions)
8547 return body;
8549 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8550 decl = lang_hooks.eh_protect_cleanup_actions ();
8551 else
8552 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8554 g = gimple_build_eh_must_not_throw (decl);
8555 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8556 GIMPLE_TRY_CATCH);
8558 return gimple_seq_alloc_with_stmt (g);
8562 /* Routines to lower OMP directives into OMP-GIMPLE. */
8564 /* If ctx is a worksharing context inside of a cancellable parallel
8565 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8566 and conditional branch to parallel's cancel_label to handle
8567 cancellation in the implicit barrier. */
8569 static void
8570 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8571 gimple_seq *body)
8573 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8574 if (gimple_omp_return_nowait_p (omp_return))
8575 return;
8576 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8577 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8578 && outer->cancellable)
8580 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8581 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8582 tree lhs = create_tmp_var (c_bool_type);
8583 gimple_omp_return_set_lhs (omp_return, lhs);
8584 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8585 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8586 fold_convert (c_bool_type,
8587 boolean_false_node),
8588 outer->cancel_label, fallthru_label);
8589 gimple_seq_add_stmt (body, g);
8590 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8592 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
8593 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
8594 return;
8597 /* Find the first task_reduction or reduction clause or return NULL
8598 if there are none. */
8600 static inline tree
8601 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8602 enum omp_clause_code ccode)
8604 while (1)
8606 clauses = omp_find_clause (clauses, ccode);
8607 if (clauses == NULL_TREE)
8608 return NULL_TREE;
8609 if (ccode != OMP_CLAUSE_REDUCTION
8610 || code == OMP_TASKLOOP
8611 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8612 return clauses;
8613 clauses = OMP_CLAUSE_CHAIN (clauses);
8617 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8618 gimple_seq *, gimple_seq *);
8620 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8621 CTX is the enclosing OMP context for the current statement. */
8623 static void
8624 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8626 tree block, control;
8627 gimple_stmt_iterator tgsi;
8628 gomp_sections *stmt;
8629 gimple *t;
8630 gbind *new_stmt, *bind;
8631 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8633 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
8635 push_gimplify_context ();
8637 dlist = NULL;
8638 ilist = NULL;
8640 tree rclauses
8641 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
8642 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
8643 tree rtmp = NULL_TREE;
8644 if (rclauses)
8646 tree type = build_pointer_type (pointer_sized_int_node);
8647 tree temp = create_tmp_var (type);
8648 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8649 OMP_CLAUSE_DECL (c) = temp;
8650 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
8651 gimple_omp_sections_set_clauses (stmt, c);
8652 lower_omp_task_reductions (ctx, OMP_SECTIONS,
8653 gimple_omp_sections_clauses (stmt),
8654 &ilist, &tred_dlist);
8655 rclauses = c;
8656 rtmp = make_ssa_name (type);
8657 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
8660 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
8661 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
8663 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
8664 &ilist, &dlist, ctx, NULL);
8666 control = create_tmp_var (unsigned_type_node, ".section");
8667 gimple_omp_sections_set_control (stmt, control);
8669 new_body = gimple_omp_body (stmt);
8670 gimple_omp_set_body (stmt, NULL);
8671 tgsi = gsi_start (new_body);
8672 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
8674 omp_context *sctx;
8675 gimple *sec_start;
8677 sec_start = gsi_stmt (tgsi);
8678 sctx = maybe_lookup_ctx (sec_start);
8679 gcc_assert (sctx);
8681 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8682 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8683 GSI_CONTINUE_LINKING);
8684 gimple_omp_set_body (sec_start, NULL);
8686 if (gsi_one_before_end_p (tgsi))
8688 gimple_seq l = NULL;
8689 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8690 &ilist, &l, &clist, ctx);
8691 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8692 gimple_omp_section_set_last (sec_start);
8695 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8696 GSI_CONTINUE_LINKING);
8699 block = make_node (BLOCK);
8700 bind = gimple_build_bind (NULL, new_body, block);
8702 olist = NULL;
8703 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8704 &clist, ctx);
8705 if (clist)
8707 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8708 gcall *g = gimple_build_call (fndecl, 0);
8709 gimple_seq_add_stmt (&olist, g);
8710 gimple_seq_add_seq (&olist, clist);
8711 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8712 g = gimple_build_call (fndecl, 0);
8713 gimple_seq_add_stmt (&olist, g);
8716 block = make_node (BLOCK);
8717 new_stmt = gimple_build_bind (NULL, NULL, block);
8718 gsi_replace (gsi_p, new_stmt, true);
8720 pop_gimplify_context (new_stmt);
8721 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8722 BLOCK_VARS (block) = gimple_bind_vars (bind);
8723 if (BLOCK_VARS (block))
8724 TREE_USED (block) = 1;
8726 new_body = NULL;
8727 gimple_seq_add_seq (&new_body, ilist);
8728 gimple_seq_add_stmt (&new_body, stmt);
8729 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8730 gimple_seq_add_stmt (&new_body, bind);
8732 t = gimple_build_omp_continue (control, control);
8733 gimple_seq_add_stmt (&new_body, t);
8735 gimple_seq_add_seq (&new_body, olist);
8736 if (ctx->cancellable)
8737 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8738 gimple_seq_add_seq (&new_body, dlist);
8740 new_body = maybe_catch_exception (new_body);
8742 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8743 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8744 t = gimple_build_omp_return (nowait);
8745 gimple_seq_add_stmt (&new_body, t);
8746 gimple_seq_add_seq (&new_body, tred_dlist);
8747 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
8749 if (rclauses)
8750 OMP_CLAUSE_DECL (rclauses) = rtmp;
8752 gimple_bind_set_body (new_stmt, new_body);
8756 /* A subroutine of lower_omp_single. Expand the simple form of
8757 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8759 if (GOMP_single_start ())
8760 BODY;
8761 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8763 FIXME. It may be better to delay expanding the logic of this until
8764 pass_expand_omp. The expanded logic may make the job more difficult
8765 to a synchronization analysis pass. */
8767 static void
8768 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8770 location_t loc = gimple_location (single_stmt);
8771 tree tlabel = create_artificial_label (loc);
8772 tree flabel = create_artificial_label (loc);
8773 gimple *call, *cond;
8774 tree lhs, decl;
8776 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8777 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8778 call = gimple_build_call (decl, 0);
8779 gimple_call_set_lhs (call, lhs);
8780 gimple_seq_add_stmt (pre_p, call);
8782 cond = gimple_build_cond (EQ_EXPR, lhs,
8783 fold_convert_loc (loc, TREE_TYPE (lhs),
8784 boolean_true_node),
8785 tlabel, flabel);
8786 gimple_seq_add_stmt (pre_p, cond);
8787 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8788 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8789 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8793 /* A subroutine of lower_omp_single. Expand the simple form of
8794 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8796 #pragma omp single copyprivate (a, b, c)
8798 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8801 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8803 BODY;
8804 copyout.a = a;
8805 copyout.b = b;
8806 copyout.c = c;
8807 GOMP_single_copy_end (&copyout);
8809 else
8811 a = copyout_p->a;
8812 b = copyout_p->b;
8813 c = copyout_p->c;
8815 GOMP_barrier ();
8818 FIXME. It may be better to delay expanding the logic of this until
8819 pass_expand_omp. The expanded logic may make the job more difficult
8820 to a synchronization analysis pass. */
8822 static void
8823 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8824 omp_context *ctx)
8826 tree ptr_type, t, l0, l1, l2, bfn_decl;
8827 gimple_seq copyin_seq;
8828 location_t loc = gimple_location (single_stmt);
8830 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8832 ptr_type = build_pointer_type (ctx->record_type);
8833 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8835 l0 = create_artificial_label (loc);
8836 l1 = create_artificial_label (loc);
8837 l2 = create_artificial_label (loc);
8839 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8840 t = build_call_expr_loc (loc, bfn_decl, 0);
8841 t = fold_convert_loc (loc, ptr_type, t);
8842 gimplify_assign (ctx->receiver_decl, t, pre_p);
8844 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8845 build_int_cst (ptr_type, 0));
8846 t = build3 (COND_EXPR, void_type_node, t,
8847 build_and_jump (&l0), build_and_jump (&l1));
8848 gimplify_and_add (t, pre_p);
8850 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8852 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8854 copyin_seq = NULL;
8855 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8856 &copyin_seq, ctx);
8858 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8859 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8860 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8861 gimplify_and_add (t, pre_p);
8863 t = build_and_jump (&l2);
8864 gimplify_and_add (t, pre_p);
8866 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8868 gimple_seq_add_seq (pre_p, copyin_seq);
8870 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8874 /* Expand code for an OpenMP single directive. */
8876 static void
8877 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8879 tree block;
8880 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8881 gbind *bind;
8882 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8884 push_gimplify_context ();
8886 block = make_node (BLOCK);
8887 bind = gimple_build_bind (NULL, NULL, block);
8888 gsi_replace (gsi_p, bind, true);
8889 bind_body = NULL;
8890 dlist = NULL;
8891 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8892 &bind_body, &dlist, ctx, NULL);
8893 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8895 gimple_seq_add_stmt (&bind_body, single_stmt);
8897 if (ctx->record_type)
8898 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8899 else
8900 lower_omp_single_simple (single_stmt, &bind_body);
8902 gimple_omp_set_body (single_stmt, NULL);
8904 gimple_seq_add_seq (&bind_body, dlist);
8906 bind_body = maybe_catch_exception (bind_body);
8908 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8909 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8910 gimple *g = gimple_build_omp_return (nowait);
8911 gimple_seq_add_stmt (&bind_body_tail, g);
8912 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8913 if (ctx->record_type)
8915 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8916 tree clobber = build_clobber (ctx->record_type);
8917 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8918 clobber), GSI_SAME_STMT);
8920 gimple_seq_add_seq (&bind_body, bind_body_tail);
8921 gimple_bind_set_body (bind, bind_body);
8923 pop_gimplify_context (bind);
8925 gimple_bind_append_vars (bind, ctx->block_vars);
8926 BLOCK_VARS (block) = ctx->block_vars;
8927 if (BLOCK_VARS (block))
8928 TREE_USED (block) = 1;
8932 /* Lower code for an OMP scope directive. */
8934 static void
8935 lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8937 tree block;
8938 gimple *scope_stmt = gsi_stmt (*gsi_p);
8939 gbind *bind;
8940 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8941 gimple_seq tred_dlist = NULL;
8943 push_gimplify_context ();
8945 block = make_node (BLOCK);
8946 bind = gimple_build_bind (NULL, NULL, block);
8947 gsi_replace (gsi_p, bind, true);
8948 bind_body = NULL;
8949 dlist = NULL;
8951 tree rclauses
8952 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
8953 OMP_SCOPE, OMP_CLAUSE_REDUCTION);
8954 if (rclauses)
8956 tree type = build_pointer_type (pointer_sized_int_node);
8957 tree temp = create_tmp_var (type);
8958 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8959 OMP_CLAUSE_DECL (c) = temp;
8960 OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
8961 gimple_omp_scope_set_clauses (scope_stmt, c);
8962 lower_omp_task_reductions (ctx, OMP_SCOPE,
8963 gimple_omp_scope_clauses (scope_stmt),
8964 &bind_body, &tred_dlist);
8965 rclauses = c;
8966 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
8967 gimple *stmt = gimple_build_call (fndecl, 1, temp);
8968 gimple_seq_add_stmt (&bind_body, stmt);
8971 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
8972 &bind_body, &dlist, ctx, NULL);
8973 lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);
8975 gimple_seq_add_stmt (&bind_body, scope_stmt);
8977 gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));
8979 gimple_omp_set_body (scope_stmt, NULL);
8981 gimple_seq clist = NULL;
8982 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
8983 &bind_body, &clist, ctx);
8984 if (clist)
8986 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8987 gcall *g = gimple_build_call (fndecl, 0);
8988 gimple_seq_add_stmt (&bind_body, g);
8989 gimple_seq_add_seq (&bind_body, clist);
8990 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8991 g = gimple_build_call (fndecl, 0);
8992 gimple_seq_add_stmt (&bind_body, g);
8995 gimple_seq_add_seq (&bind_body, dlist);
8997 bind_body = maybe_catch_exception (bind_body);
8999 bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
9000 OMP_CLAUSE_NOWAIT) != NULL_TREE;
9001 gimple *g = gimple_build_omp_return (nowait);
9002 gimple_seq_add_stmt (&bind_body_tail, g);
9003 gimple_seq_add_seq (&bind_body_tail, tred_dlist);
9004 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
9005 if (ctx->record_type)
9007 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
9008 tree clobber = build_clobber (ctx->record_type);
9009 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
9010 clobber), GSI_SAME_STMT);
9012 gimple_seq_add_seq (&bind_body, bind_body_tail);
9014 gimple_bind_set_body (bind, bind_body);
9016 pop_gimplify_context (bind);
9018 gimple_bind_append_vars (bind, ctx->block_vars);
9019 BLOCK_VARS (block) = ctx->block_vars;
9020 if (BLOCK_VARS (block))
9021 TREE_USED (block) = 1;
9023 /* Expand code for an OpenMP master or masked directive. */
9025 static void
9026 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9028 tree block, lab = NULL, x, bfn_decl;
9029 gimple *stmt = gsi_stmt (*gsi_p);
9030 gbind *bind;
9031 location_t loc = gimple_location (stmt);
9032 gimple_seq tseq;
9033 tree filter = integer_zero_node;
9035 push_gimplify_context ();
9037 if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
9039 filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
9040 OMP_CLAUSE_FILTER);
9041 if (filter)
9042 filter = fold_convert (integer_type_node,
9043 OMP_CLAUSE_FILTER_EXPR (filter));
9044 else
9045 filter = integer_zero_node;
9047 block = make_node (BLOCK);
9048 bind = gimple_build_bind (NULL, NULL, block);
9049 gsi_replace (gsi_p, bind, true);
9050 gimple_bind_add_stmt (bind, stmt);
9052 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9053 x = build_call_expr_loc (loc, bfn_decl, 0);
9054 x = build2 (EQ_EXPR, boolean_type_node, x, filter);
9055 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
9056 tseq = NULL;
9057 gimplify_and_add (x, &tseq);
9058 gimple_bind_add_seq (bind, tseq);
9060 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9061 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9062 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9063 gimple_omp_set_body (stmt, NULL);
9065 gimple_bind_add_stmt (bind, gimple_build_label (lab));
9067 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9069 pop_gimplify_context (bind);
9071 gimple_bind_append_vars (bind, ctx->block_vars);
9072 BLOCK_VARS (block) = ctx->block_vars;
9075 /* Helper function for lower_omp_task_reductions. For a specific PASS
9076 find out the current clause it should be processed, or return false
9077 if all have been processed already. */
9079 static inline bool
9080 omp_task_reduction_iterate (int pass, enum tree_code code,
9081 enum omp_clause_code ccode, tree *c, tree *decl,
9082 tree *type, tree *next)
9084 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
9086 if (ccode == OMP_CLAUSE_REDUCTION
9087 && code != OMP_TASKLOOP
9088 && !OMP_CLAUSE_REDUCTION_TASK (*c))
9089 continue;
9090 *decl = OMP_CLAUSE_DECL (*c);
9091 *type = TREE_TYPE (*decl);
9092 if (TREE_CODE (*decl) == MEM_REF)
9094 if (pass != 1)
9095 continue;
9097 else
9099 if (omp_privatize_by_reference (*decl))
9100 *type = TREE_TYPE (*type);
9101 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
9102 continue;
9104 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
9105 return true;
9107 *decl = NULL_TREE;
9108 *type = NULL_TREE;
9109 *next = NULL_TREE;
9110 return false;
9113 /* Lower task_reduction and reduction clauses (the latter unless CODE is
9114 OMP_TASKGROUP only with task modifier). Register mapping of those in
9115 START sequence and reducing them and unregister them in the END sequence. */
9117 static void
9118 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
9119 gimple_seq *start, gimple_seq *end)
9121 enum omp_clause_code ccode
9122 = (code == OMP_TASKGROUP
9123 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
9124 tree cancellable = NULL_TREE;
9125 clauses = omp_task_reductions_find_first (clauses, code, ccode);
9126 if (clauses == NULL_TREE)
9127 return;
9128 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9130 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
9131 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
9132 && outer->cancellable)
9134 cancellable = error_mark_node;
9135 break;
9137 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
9138 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
9139 break;
9141 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
9142 tree *last = &TYPE_FIELDS (record_type);
9143 unsigned cnt = 0;
9144 if (cancellable)
9146 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9147 ptr_type_node);
9148 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9149 integer_type_node);
9150 *last = field;
9151 DECL_CHAIN (field) = ifield;
9152 last = &DECL_CHAIN (ifield);
9153 DECL_CONTEXT (field) = record_type;
9154 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9155 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9156 DECL_CONTEXT (ifield) = record_type;
9157 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
9158 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
9160 for (int pass = 0; pass < 2; pass++)
9162 tree decl, type, next;
9163 for (tree c = clauses;
9164 omp_task_reduction_iterate (pass, code, ccode,
9165 &c, &decl, &type, &next); c = next)
9167 ++cnt;
9168 tree new_type = type;
9169 if (ctx->outer)
9170 new_type = remap_type (type, &ctx->outer->cb);
9171 tree field
9172 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
9173 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
9174 new_type);
9175 if (DECL_P (decl) && type == TREE_TYPE (decl))
9177 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
9178 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
9179 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
9181 else
9182 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
9183 DECL_CONTEXT (field) = record_type;
9184 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9185 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9186 *last = field;
9187 last = &DECL_CHAIN (field);
9188 tree bfield
9189 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
9190 boolean_type_node);
9191 DECL_CONTEXT (bfield) = record_type;
9192 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
9193 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
9194 *last = bfield;
9195 last = &DECL_CHAIN (bfield);
9198 *last = NULL_TREE;
9199 layout_type (record_type);
9201 /* Build up an array which registers with the runtime all the reductions
9202 and deregisters them at the end. Format documented in libgomp/task.c. */
9203 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
9204 tree avar = create_tmp_var_raw (atype);
9205 gimple_add_tmp_var (avar);
9206 TREE_ADDRESSABLE (avar) = 1;
9207 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
9208 NULL_TREE, NULL_TREE);
9209 tree t = build_int_cst (pointer_sized_int_node, cnt);
9210 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9211 gimple_seq seq = NULL;
9212 tree sz = fold_convert (pointer_sized_int_node,
9213 TYPE_SIZE_UNIT (record_type));
9214 int cachesz = 64;
9215 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
9216 build_int_cst (pointer_sized_int_node, cachesz - 1));
9217 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
9218 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
9219 ctx->task_reductions.create (1 + cnt);
9220 ctx->task_reduction_map = new hash_map<tree, unsigned>;
9221 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
9222 ? sz : NULL_TREE);
9223 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
9224 gimple_seq_add_seq (start, seq);
9225 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
9226 NULL_TREE, NULL_TREE);
9227 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
9228 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9229 NULL_TREE, NULL_TREE);
9230 t = build_int_cst (pointer_sized_int_node,
9231 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
9232 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9233 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
9234 NULL_TREE, NULL_TREE);
9235 t = build_int_cst (pointer_sized_int_node, -1);
9236 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9237 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
9238 NULL_TREE, NULL_TREE);
9239 t = build_int_cst (pointer_sized_int_node, 0);
9240 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9242 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
9243 and for each task reduction checks a bool right after the private variable
9244 within that thread's chunk; if the bool is clear, it hasn't been
9245 initialized and thus isn't going to be reduced nor destructed, otherwise
9246 reduce and destruct it. */
9247 tree idx = create_tmp_var (size_type_node);
9248 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
9249 tree num_thr_sz = create_tmp_var (size_type_node);
9250 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9251 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9252 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
9253 gimple *g;
9254 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9256 /* For worksharing constructs or scope, only perform it in the master
9257 thread, with the exception of cancelled implicit barriers - then only
9258 handle the current thread. */
9259 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9260 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9261 tree thr_num = create_tmp_var (integer_type_node);
9262 g = gimple_build_call (t, 0);
9263 gimple_call_set_lhs (g, thr_num);
9264 gimple_seq_add_stmt (end, g);
9265 if (cancellable)
9267 tree c;
9268 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9269 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9270 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9271 if (code == OMP_FOR)
9272 c = gimple_omp_for_clauses (ctx->stmt);
9273 else if (code == OMP_SECTIONS)
9274 c = gimple_omp_sections_clauses (ctx->stmt);
9275 else /* if (code == OMP_SCOPE) */
9276 c = gimple_omp_scope_clauses (ctx->stmt);
9277 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
9278 cancellable = c;
9279 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
9280 lab5, lab6);
9281 gimple_seq_add_stmt (end, g);
9282 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9283 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
9284 gimple_seq_add_stmt (end, g);
9285 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
9286 build_one_cst (TREE_TYPE (idx)));
9287 gimple_seq_add_stmt (end, g);
9288 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
9289 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9291 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
9292 gimple_seq_add_stmt (end, g);
9293 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9295 if (code != OMP_PARALLEL)
9297 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9298 tree num_thr = create_tmp_var (integer_type_node);
9299 g = gimple_build_call (t, 0);
9300 gimple_call_set_lhs (g, num_thr);
9301 gimple_seq_add_stmt (end, g);
9302 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
9303 gimple_seq_add_stmt (end, g);
9304 if (cancellable)
9305 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9307 else
9309 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
9310 OMP_CLAUSE__REDUCTEMP_);
9311 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
9312 t = fold_convert (size_type_node, t);
9313 gimplify_assign (num_thr_sz, t, end);
9315 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9316 NULL_TREE, NULL_TREE);
9317 tree data = create_tmp_var (pointer_sized_int_node);
9318 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
9319 if (code == OMP_TASKLOOP)
9321 lab7 = create_artificial_label (UNKNOWN_LOCATION);
9322 g = gimple_build_cond (NE_EXPR, data,
9323 build_zero_cst (pointer_sized_int_node),
9324 lab1, lab7);
9325 gimple_seq_add_stmt (end, g);
9327 gimple_seq_add_stmt (end, gimple_build_label (lab1));
9328 tree ptr;
9329 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
9330 ptr = create_tmp_var (build_pointer_type (record_type));
9331 else
9332 ptr = create_tmp_var (ptr_type_node);
9333 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
9335 tree field = TYPE_FIELDS (record_type);
9336 cnt = 0;
9337 if (cancellable)
9338 field = DECL_CHAIN (DECL_CHAIN (field));
9339 for (int pass = 0; pass < 2; pass++)
9341 tree decl, type, next;
9342 for (tree c = clauses;
9343 omp_task_reduction_iterate (pass, code, ccode,
9344 &c, &decl, &type, &next); c = next)
9346 tree var = decl, ref;
9347 if (TREE_CODE (decl) == MEM_REF)
9349 var = TREE_OPERAND (var, 0);
9350 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
9351 var = TREE_OPERAND (var, 0);
9352 tree v = var;
9353 if (TREE_CODE (var) == ADDR_EXPR)
9354 var = TREE_OPERAND (var, 0);
9355 else if (TREE_CODE (var) == INDIRECT_REF)
9356 var = TREE_OPERAND (var, 0);
9357 tree orig_var = var;
9358 if (is_variable_sized (var))
9360 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
9361 var = DECL_VALUE_EXPR (var);
9362 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
9363 var = TREE_OPERAND (var, 0);
9364 gcc_assert (DECL_P (var));
9366 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9367 if (orig_var != var)
9368 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
9369 else if (TREE_CODE (v) == ADDR_EXPR)
9370 t = build_fold_addr_expr (t);
9371 else if (TREE_CODE (v) == INDIRECT_REF)
9372 t = build_fold_indirect_ref (t);
9373 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
9375 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
9376 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
9377 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
9379 if (!integer_zerop (TREE_OPERAND (decl, 1)))
9380 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
9381 fold_convert (size_type_node,
9382 TREE_OPERAND (decl, 1)));
9384 else
9386 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9387 if (!omp_privatize_by_reference (decl))
9388 t = build_fold_addr_expr (t);
9390 t = fold_convert (pointer_sized_int_node, t);
9391 seq = NULL;
9392 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9393 gimple_seq_add_seq (start, seq);
9394 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9395 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9396 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9397 t = unshare_expr (byte_position (field));
9398 t = fold_convert (pointer_sized_int_node, t);
9399 ctx->task_reduction_map->put (c, cnt);
9400 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
9401 ? t : NULL_TREE);
9402 seq = NULL;
9403 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9404 gimple_seq_add_seq (start, seq);
9405 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9406 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
9407 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9409 tree bfield = DECL_CHAIN (field);
9410 tree cond;
9411 if (code == OMP_PARALLEL
9412 || code == OMP_FOR
9413 || code == OMP_SECTIONS
9414 || code == OMP_SCOPE)
9415 /* In parallel, worksharing or scope all threads unconditionally
9416 initialize all their task reduction private variables. */
9417 cond = boolean_true_node;
9418 else if (TREE_TYPE (ptr) == ptr_type_node)
9420 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9421 unshare_expr (byte_position (bfield)));
9422 seq = NULL;
9423 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
9424 gimple_seq_add_seq (end, seq);
9425 tree pbool = build_pointer_type (TREE_TYPE (bfield));
9426 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
9427 build_int_cst (pbool, 0));
9429 else
9430 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
9431 build_simple_mem_ref (ptr), bfield, NULL_TREE);
9432 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9433 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9434 tree condv = create_tmp_var (boolean_type_node);
9435 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
9436 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
9437 lab3, lab4);
9438 gimple_seq_add_stmt (end, g);
9439 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9440 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
9442 /* If this reduction doesn't need destruction and parallel
9443 has been cancelled, there is nothing to do for this
9444 reduction, so jump around the merge operation. */
9445 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9446 g = gimple_build_cond (NE_EXPR, cancellable,
9447 build_zero_cst (TREE_TYPE (cancellable)),
9448 lab4, lab5);
9449 gimple_seq_add_stmt (end, g);
9450 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9453 tree new_var;
9454 if (TREE_TYPE (ptr) == ptr_type_node)
9456 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9457 unshare_expr (byte_position (field)));
9458 seq = NULL;
9459 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9460 gimple_seq_add_seq (end, seq);
9461 tree pbool = build_pointer_type (TREE_TYPE (field));
9462 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9463 build_int_cst (pbool, 0));
9465 else
9466 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9467 build_simple_mem_ref (ptr), field, NULL_TREE);
9469 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9470 if (TREE_CODE (decl) != MEM_REF
9471 && omp_privatize_by_reference (decl))
9472 ref = build_simple_mem_ref (ref);
9473 /* reduction(-:var) sums up the partial results, so it acts
9474 identically to reduction(+:var). */
9475 if (rcode == MINUS_EXPR)
9476 rcode = PLUS_EXPR;
9477 if (TREE_CODE (decl) == MEM_REF)
9479 tree type = TREE_TYPE (new_var);
9480 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9481 tree i = create_tmp_var (TREE_TYPE (v));
9482 tree ptype = build_pointer_type (TREE_TYPE (type));
9483 if (DECL_P (v))
9485 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9486 tree vv = create_tmp_var (TREE_TYPE (v));
9487 gimplify_assign (vv, v, start);
9488 v = vv;
9490 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9491 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9492 new_var = build_fold_addr_expr (new_var);
9493 new_var = fold_convert (ptype, new_var);
9494 ref = fold_convert (ptype, ref);
9495 tree m = create_tmp_var (ptype);
9496 gimplify_assign (m, new_var, end);
9497 new_var = m;
9498 m = create_tmp_var (ptype);
9499 gimplify_assign (m, ref, end);
9500 ref = m;
9501 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9502 tree body = create_artificial_label (UNKNOWN_LOCATION);
9503 tree endl = create_artificial_label (UNKNOWN_LOCATION);
9504 gimple_seq_add_stmt (end, gimple_build_label (body));
9505 tree priv = build_simple_mem_ref (new_var);
9506 tree out = build_simple_mem_ref (ref);
9507 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9509 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9510 tree decl_placeholder
9511 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9512 tree lab6 = NULL_TREE;
9513 if (cancellable)
9515 /* If this reduction needs destruction and parallel
9516 has been cancelled, jump around the merge operation
9517 to the destruction. */
9518 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9519 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9520 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9521 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9522 lab6, lab5);
9523 gimple_seq_add_stmt (end, g);
9524 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9526 SET_DECL_VALUE_EXPR (placeholder, out);
9527 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9528 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9529 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9530 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9531 gimple_seq_add_seq (end,
9532 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9533 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9534 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9536 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9537 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9539 if (cancellable)
9540 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9541 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9542 if (x)
9544 gimple_seq tseq = NULL;
9545 gimplify_stmt (&x, &tseq);
9546 gimple_seq_add_seq (end, tseq);
9549 else
9551 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9552 out = unshare_expr (out);
9553 gimplify_assign (out, x, end);
9555 gimple *g
9556 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9557 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9558 gimple_seq_add_stmt (end, g);
9559 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9560 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9561 gimple_seq_add_stmt (end, g);
9562 g = gimple_build_assign (i, PLUS_EXPR, i,
9563 build_int_cst (TREE_TYPE (i), 1));
9564 gimple_seq_add_stmt (end, g);
9565 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9566 gimple_seq_add_stmt (end, g);
9567 gimple_seq_add_stmt (end, gimple_build_label (endl));
9569 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9571 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9572 tree oldv = NULL_TREE;
9573 tree lab6 = NULL_TREE;
9574 if (cancellable)
9576 /* If this reduction needs destruction and parallel
9577 has been cancelled, jump around the merge operation
9578 to the destruction. */
9579 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9580 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9581 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9582 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9583 lab6, lab5);
9584 gimple_seq_add_stmt (end, g);
9585 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9587 if (omp_privatize_by_reference (decl)
9588 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9589 TREE_TYPE (ref)))
9590 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9591 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9592 tree refv = create_tmp_var (TREE_TYPE (ref));
9593 gimplify_assign (refv, ref, end);
9594 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9595 SET_DECL_VALUE_EXPR (placeholder, ref);
9596 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9597 tree d = maybe_lookup_decl (decl, ctx);
9598 gcc_assert (d);
9599 if (DECL_HAS_VALUE_EXPR_P (d))
9600 oldv = DECL_VALUE_EXPR (d);
9601 if (omp_privatize_by_reference (var))
9603 tree v = fold_convert (TREE_TYPE (d),
9604 build_fold_addr_expr (new_var));
9605 SET_DECL_VALUE_EXPR (d, v);
9607 else
9608 SET_DECL_VALUE_EXPR (d, new_var);
9609 DECL_HAS_VALUE_EXPR_P (d) = 1;
9610 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9611 if (oldv)
9612 SET_DECL_VALUE_EXPR (d, oldv);
9613 else
9615 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9616 DECL_HAS_VALUE_EXPR_P (d) = 0;
9618 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9619 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9620 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9621 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9622 if (cancellable)
9623 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9624 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9625 if (x)
9627 gimple_seq tseq = NULL;
9628 gimplify_stmt (&x, &tseq);
9629 gimple_seq_add_seq (end, tseq);
9632 else
9634 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9635 ref = unshare_expr (ref);
9636 gimplify_assign (ref, x, end);
9638 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9639 ++cnt;
9640 field = DECL_CHAIN (bfield);
9644 if (code == OMP_TASKGROUP)
9646 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9647 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9648 gimple_seq_add_stmt (start, g);
9650 else
9652 tree c;
9653 if (code == OMP_FOR)
9654 c = gimple_omp_for_clauses (ctx->stmt);
9655 else if (code == OMP_SECTIONS)
9656 c = gimple_omp_sections_clauses (ctx->stmt);
9657 else if (code == OMP_SCOPE)
9658 c = gimple_omp_scope_clauses (ctx->stmt);
9659 else
9660 c = gimple_omp_taskreg_clauses (ctx->stmt);
9661 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9662 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9663 build_fold_addr_expr (avar));
9664 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9667 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9668 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9669 size_one_node));
9670 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9671 gimple_seq_add_stmt (end, g);
9672 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9673 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9675 enum built_in_function bfn
9676 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9677 t = builtin_decl_explicit (bfn);
9678 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9679 tree arg;
9680 if (cancellable)
9682 arg = create_tmp_var (c_bool_type);
9683 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9684 cancellable));
9686 else
9687 arg = build_int_cst (c_bool_type, 0);
9688 g = gimple_build_call (t, 1, arg);
9690 else
9692 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9693 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9695 gimple_seq_add_stmt (end, g);
9696 if (lab7)
9697 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9698 t = build_constructor (atype, NULL);
9699 TREE_THIS_VOLATILE (t) = 1;
9700 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9703 /* Expand code for an OpenMP taskgroup directive. */
9705 static void
9706 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9708 gimple *stmt = gsi_stmt (*gsi_p);
9709 gcall *x;
9710 gbind *bind;
9711 gimple_seq dseq = NULL;
9712 tree block = make_node (BLOCK);
9714 bind = gimple_build_bind (NULL, NULL, block);
9715 gsi_replace (gsi_p, bind, true);
9716 gimple_bind_add_stmt (bind, stmt);
9718 push_gimplify_context ();
9720 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9722 gimple_bind_add_stmt (bind, x);
9724 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9725 gimple_omp_taskgroup_clauses (stmt),
9726 gimple_bind_body_ptr (bind), &dseq);
9728 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9729 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9730 gimple_omp_set_body (stmt, NULL);
9732 gimple_bind_add_seq (bind, dseq);
9734 pop_gimplify_context (bind);
9736 gimple_bind_append_vars (bind, ctx->block_vars);
9737 BLOCK_VARS (block) = ctx->block_vars;
9741 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9743 static void
9744 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9745 omp_context *ctx)
9747 struct omp_for_data fd;
9748 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9749 return;
9751 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9752 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9753 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9754 if (!fd.ordered)
9755 return;
9757 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9758 tree c = gimple_omp_ordered_clauses (ord_stmt);
9759 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
9760 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
9762 /* Merge depend clauses from multiple adjacent
9763 #pragma omp ordered depend(sink:...) constructs
9764 into one #pragma omp ordered depend(sink:...), so that
9765 we can optimize them together. */
9766 gimple_stmt_iterator gsi = *gsi_p;
9767 gsi_next (&gsi);
9768 while (!gsi_end_p (gsi))
9770 gimple *stmt = gsi_stmt (gsi);
9771 if (is_gimple_debug (stmt)
9772 || gimple_code (stmt) == GIMPLE_NOP)
9774 gsi_next (&gsi);
9775 continue;
9777 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9778 break;
9779 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9780 c = gimple_omp_ordered_clauses (ord_stmt2);
9781 if (c == NULL_TREE
9782 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS
9783 || OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
9784 break;
9785 while (*list_p)
9786 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9787 *list_p = c;
9788 gsi_remove (&gsi, true);
9792 /* Canonicalize sink dependence clauses into one folded clause if
9793 possible.
9795 The basic algorithm is to create a sink vector whose first
9796 element is the GCD of all the first elements, and whose remaining
9797 elements are the minimum of the subsequent columns.
9799 We ignore dependence vectors whose first element is zero because
9800 such dependencies are known to be executed by the same thread.
9802 We take into account the direction of the loop, so a minimum
9803 becomes a maximum if the loop is iterating forwards. We also
9804 ignore sink clauses where the loop direction is unknown, or where
9805 the offsets are clearly invalid because they are not a multiple
9806 of the loop increment.
9808 For example:
9810 #pragma omp for ordered(2)
9811 for (i=0; i < N; ++i)
9812 for (j=0; j < M; ++j)
9814 #pragma omp ordered \
9815 depend(sink:i-8,j-2) \
9816 depend(sink:i,j-1) \ // Completely ignored because i+0.
9817 depend(sink:i-4,j-3) \
9818 depend(sink:i-6,j-4)
9819 #pragma omp ordered depend(source)
9822 Folded clause is:
9824 depend(sink:-gcd(8,4,6),-min(2,3,4))
9825 -or-
9826 depend(sink:-2,-2)
9829 /* FIXME: Computing GCD's where the first element is zero is
9830 non-trivial in the presence of collapsed loops. Do this later. */
9831 if (fd.collapse > 1)
9832 return;
9834 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9836 /* wide_int is not a POD so it must be default-constructed. */
9837 for (unsigned i = 0; i != 2 * len - 1; ++i)
9838 new (static_cast<void*>(folded_deps + i)) wide_int ();
9840 tree folded_dep = NULL_TREE;
9841 /* TRUE if the first dimension's offset is negative. */
9842 bool neg_offset_p = false;
9844 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9845 unsigned int i;
9846 while ((c = *list_p) != NULL)
9848 bool remove = false;
9850 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS);
9851 if (OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
9852 goto next_ordered_clause;
9854 tree vec;
9855 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9856 vec && TREE_CODE (vec) == TREE_LIST;
9857 vec = TREE_CHAIN (vec), ++i)
9859 gcc_assert (i < len);
9861 /* omp_extract_for_data has canonicalized the condition. */
9862 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9863 || fd.loops[i].cond_code == GT_EXPR);
9864 bool forward = fd.loops[i].cond_code == LT_EXPR;
9865 bool maybe_lexically_later = true;
9867 /* While the committee makes up its mind, bail if we have any
9868 non-constant steps. */
9869 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9870 goto lower_omp_ordered_ret;
9872 tree itype = TREE_TYPE (TREE_VALUE (vec));
9873 if (POINTER_TYPE_P (itype))
9874 itype = sizetype;
9875 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9876 TYPE_PRECISION (itype),
9877 TYPE_SIGN (itype));
9879 /* Ignore invalid offsets that are not multiples of the step. */
9880 if (!wi::multiple_of_p (wi::abs (offset),
9881 wi::abs (wi::to_wide (fd.loops[i].step)),
9882 UNSIGNED))
9884 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9885 "ignoring sink clause with offset that is not "
9886 "a multiple of the loop step");
9887 remove = true;
9888 goto next_ordered_clause;
9891 /* Calculate the first dimension. The first dimension of
9892 the folded dependency vector is the GCD of the first
9893 elements, while ignoring any first elements whose offset
9894 is 0. */
9895 if (i == 0)
9897 /* Ignore dependence vectors whose first dimension is 0. */
9898 if (offset == 0)
9900 remove = true;
9901 goto next_ordered_clause;
9903 else
9905 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9907 error_at (OMP_CLAUSE_LOCATION (c),
9908 "first offset must be in opposite direction "
9909 "of loop iterations");
9910 goto lower_omp_ordered_ret;
9912 if (forward)
9913 offset = -offset;
9914 neg_offset_p = forward;
9915 /* Initialize the first time around. */
9916 if (folded_dep == NULL_TREE)
9918 folded_dep = c;
9919 folded_deps[0] = offset;
9921 else
9922 folded_deps[0] = wi::gcd (folded_deps[0],
9923 offset, UNSIGNED);
9926 /* Calculate minimum for the remaining dimensions. */
9927 else
9929 folded_deps[len + i - 1] = offset;
9930 if (folded_dep == c)
9931 folded_deps[i] = offset;
9932 else if (maybe_lexically_later
9933 && !wi::eq_p (folded_deps[i], offset))
9935 if (forward ^ wi::gts_p (folded_deps[i], offset))
9937 unsigned int j;
9938 folded_dep = c;
9939 for (j = 1; j <= i; j++)
9940 folded_deps[j] = folded_deps[len + j - 1];
9942 else
9943 maybe_lexically_later = false;
9947 gcc_assert (i == len);
9949 remove = true;
9951 next_ordered_clause:
9952 if (remove)
9953 *list_p = OMP_CLAUSE_CHAIN (c);
9954 else
9955 list_p = &OMP_CLAUSE_CHAIN (c);
9958 if (folded_dep)
9960 if (neg_offset_p)
9961 folded_deps[0] = -folded_deps[0];
9963 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9964 if (POINTER_TYPE_P (itype))
9965 itype = sizetype;
9967 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9968 = wide_int_to_tree (itype, folded_deps[0]);
9969 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9970 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9973 lower_omp_ordered_ret:
9975 /* Ordered without clauses is #pragma omp threads, while we want
9976 a nop instead if we remove all clauses. */
9977 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9978 gsi_replace (gsi_p, gimple_build_nop (), true);
9982 /* Expand code for an OpenMP ordered directive. */
9984 static void
9985 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9987 tree block;
9988 gimple *stmt = gsi_stmt (*gsi_p), *g;
9989 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9990 gcall *x;
9991 gbind *bind;
9992 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9993 OMP_CLAUSE_SIMD);
9994 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9995 loop. */
9996 bool maybe_simt
9997 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9998 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9999 OMP_CLAUSE_THREADS);
10001 if (gimple_omp_ordered_standalone_p (ord_stmt))
10003 /* FIXME: This is needs to be moved to the expansion to verify various
10004 conditions only testable on cfg with dominators computed, and also
10005 all the depend clauses to be merged still might need to be available
10006 for the runtime checks. */
10007 if (0)
10008 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
10009 return;
10012 push_gimplify_context ();
10014 block = make_node (BLOCK);
10015 bind = gimple_build_bind (NULL, NULL, block);
10016 gsi_replace (gsi_p, bind, true);
10017 gimple_bind_add_stmt (bind, stmt);
10019 if (simd)
10021 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
10022 build_int_cst (NULL_TREE, threads));
10023 cfun->has_simduid_loops = true;
10025 else
10026 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
10028 gimple_bind_add_stmt (bind, x);
10030 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
10031 if (maybe_simt)
10033 counter = create_tmp_var (integer_type_node);
10034 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
10035 gimple_call_set_lhs (g, counter);
10036 gimple_bind_add_stmt (bind, g);
10038 body = create_artificial_label (UNKNOWN_LOCATION);
10039 test = create_artificial_label (UNKNOWN_LOCATION);
10040 gimple_bind_add_stmt (bind, gimple_build_label (body));
10042 tree simt_pred = create_tmp_var (integer_type_node);
10043 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
10044 gimple_call_set_lhs (g, simt_pred);
10045 gimple_bind_add_stmt (bind, g);
10047 tree t = create_artificial_label (UNKNOWN_LOCATION);
10048 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
10049 gimple_bind_add_stmt (bind, g);
10051 gimple_bind_add_stmt (bind, gimple_build_label (t));
10053 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10054 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10055 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10056 gimple_omp_set_body (stmt, NULL);
10058 if (maybe_simt)
10060 gimple_bind_add_stmt (bind, gimple_build_label (test));
10061 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
10062 gimple_bind_add_stmt (bind, g);
10064 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
10065 tree nonneg = create_tmp_var (integer_type_node);
10066 gimple_seq tseq = NULL;
10067 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
10068 gimple_bind_add_seq (bind, tseq);
10070 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
10071 gimple_call_set_lhs (g, nonneg);
10072 gimple_bind_add_stmt (bind, g);
10074 tree end = create_artificial_label (UNKNOWN_LOCATION);
10075 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
10076 gimple_bind_add_stmt (bind, g);
10078 gimple_bind_add_stmt (bind, gimple_build_label (end));
10080 if (simd)
10081 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
10082 build_int_cst (NULL_TREE, threads));
10083 else
10084 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
10086 gimple_bind_add_stmt (bind, x);
10088 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10090 pop_gimplify_context (bind);
10092 gimple_bind_append_vars (bind, ctx->block_vars);
10093 BLOCK_VARS (block) = gimple_bind_vars (bind);
10097 /* Expand code for an OpenMP scan directive and the structured block
10098 before the scan directive. */
10100 static void
10101 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10103 gimple *stmt = gsi_stmt (*gsi_p);
10104 bool has_clauses
10105 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
10106 tree lane = NULL_TREE;
10107 gimple_seq before = NULL;
10108 omp_context *octx = ctx->outer;
10109 gcc_assert (octx);
10110 if (octx->scan_exclusive && !has_clauses)
10112 gimple_stmt_iterator gsi2 = *gsi_p;
10113 gsi_next (&gsi2);
10114 gimple *stmt2 = gsi_stmt (gsi2);
10115 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
10116 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
10117 the one with exclusive clause(s), comes first. */
10118 if (stmt2
10119 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
10120 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
10122 gsi_remove (gsi_p, false);
10123 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
10124 ctx = maybe_lookup_ctx (stmt2);
10125 gcc_assert (ctx);
10126 lower_omp_scan (gsi_p, ctx);
10127 return;
10131 bool input_phase = has_clauses ^ octx->scan_inclusive;
10132 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10133 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
10134 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10135 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
10136 && !gimple_omp_for_combined_p (octx->stmt));
10137 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
10138 if (is_for_simd && octx->for_simd_scan_phase)
10139 is_simd = false;
10140 if (is_simd)
10141 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
10142 OMP_CLAUSE__SIMDUID_))
10144 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
10145 lane = create_tmp_var (unsigned_type_node);
10146 tree t = build_int_cst (integer_type_node,
10147 input_phase ? 1
10148 : octx->scan_inclusive ? 2 : 3);
10149 gimple *g
10150 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
10151 gimple_call_set_lhs (g, lane);
10152 gimple_seq_add_stmt (&before, g);
10155 if (is_simd || is_for)
10157 for (tree c = gimple_omp_for_clauses (octx->stmt);
10158 c; c = OMP_CLAUSE_CHAIN (c))
10159 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10160 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10162 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10163 tree var = OMP_CLAUSE_DECL (c);
10164 tree new_var = lookup_decl (var, octx);
10165 tree val = new_var;
10166 tree var2 = NULL_TREE;
10167 tree var3 = NULL_TREE;
10168 tree var4 = NULL_TREE;
10169 tree lane0 = NULL_TREE;
10170 tree new_vard = new_var;
10171 if (omp_privatize_by_reference (var))
10173 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10174 val = new_var;
10176 if (DECL_HAS_VALUE_EXPR_P (new_vard))
10178 val = DECL_VALUE_EXPR (new_vard);
10179 if (new_vard != new_var)
10181 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
10182 val = TREE_OPERAND (val, 0);
10184 if (TREE_CODE (val) == ARRAY_REF
10185 && VAR_P (TREE_OPERAND (val, 0)))
10187 tree v = TREE_OPERAND (val, 0);
10188 if (lookup_attribute ("omp simd array",
10189 DECL_ATTRIBUTES (v)))
10191 val = unshare_expr (val);
10192 lane0 = TREE_OPERAND (val, 1);
10193 TREE_OPERAND (val, 1) = lane;
10194 var2 = lookup_decl (v, octx);
10195 if (octx->scan_exclusive)
10196 var4 = lookup_decl (var2, octx);
10197 if (input_phase
10198 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10199 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
10200 if (!input_phase)
10202 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
10203 var2, lane, NULL_TREE, NULL_TREE);
10204 TREE_THIS_NOTRAP (var2) = 1;
10205 if (octx->scan_exclusive)
10207 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
10208 var4, lane, NULL_TREE,
10209 NULL_TREE);
10210 TREE_THIS_NOTRAP (var4) = 1;
10213 else
10214 var2 = val;
10217 gcc_assert (var2);
10219 else
10221 var2 = build_outer_var_ref (var, octx);
10222 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10224 var3 = maybe_lookup_decl (new_vard, octx);
10225 if (var3 == new_vard || var3 == NULL_TREE)
10226 var3 = NULL_TREE;
10227 else if (is_simd && octx->scan_exclusive && !input_phase)
10229 var4 = maybe_lookup_decl (var3, octx);
10230 if (var4 == var3 || var4 == NULL_TREE)
10232 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
10234 var4 = var3;
10235 var3 = NULL_TREE;
10237 else
10238 var4 = NULL_TREE;
10242 if (is_simd
10243 && octx->scan_exclusive
10244 && !input_phase
10245 && var4 == NULL_TREE)
10246 var4 = create_tmp_var (TREE_TYPE (val));
10248 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10250 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10251 if (input_phase)
10253 if (var3)
10255 /* If we've added a separate identity element
10256 variable, copy it over into val. */
10257 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
10258 var3);
10259 gimplify_and_add (x, &before);
10261 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10263 /* Otherwise, assign to it the identity element. */
10264 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10265 if (is_for)
10266 tseq = copy_gimple_seq_and_replace_locals (tseq);
10267 tree ref = build_outer_var_ref (var, octx);
10268 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10269 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10270 if (x)
10272 if (new_vard != new_var)
10273 val = build_fold_addr_expr_loc (clause_loc, val);
10274 SET_DECL_VALUE_EXPR (new_vard, val);
10276 SET_DECL_VALUE_EXPR (placeholder, ref);
10277 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10278 lower_omp (&tseq, octx);
10279 if (x)
10280 SET_DECL_VALUE_EXPR (new_vard, x);
10281 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10282 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10283 gimple_seq_add_seq (&before, tseq);
10284 if (is_simd)
10285 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10288 else if (is_simd)
10290 tree x;
10291 if (octx->scan_exclusive)
10293 tree v4 = unshare_expr (var4);
10294 tree v2 = unshare_expr (var2);
10295 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
10296 gimplify_and_add (x, &before);
10298 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10299 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10300 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10301 tree vexpr = val;
10302 if (x && new_vard != new_var)
10303 vexpr = build_fold_addr_expr_loc (clause_loc, val);
10304 if (x)
10305 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10306 SET_DECL_VALUE_EXPR (placeholder, var2);
10307 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10308 lower_omp (&tseq, octx);
10309 gimple_seq_add_seq (&before, tseq);
10310 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10311 if (x)
10312 SET_DECL_VALUE_EXPR (new_vard, x);
10313 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10314 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10315 if (octx->scan_inclusive)
10317 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10318 var2);
10319 gimplify_and_add (x, &before);
10321 else if (lane0 == NULL_TREE)
10323 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10324 var4);
10325 gimplify_and_add (x, &before);
10329 else
10331 if (input_phase)
10333 /* input phase. Set val to initializer before
10334 the body. */
10335 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
10336 gimplify_assign (val, x, &before);
10338 else if (is_simd)
10340 /* scan phase. */
10341 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10342 if (code == MINUS_EXPR)
10343 code = PLUS_EXPR;
10345 tree x = build2 (code, TREE_TYPE (var2),
10346 unshare_expr (var2), unshare_expr (val));
10347 if (octx->scan_inclusive)
10349 gimplify_assign (unshare_expr (var2), x, &before);
10350 gimplify_assign (val, var2, &before);
10352 else
10354 gimplify_assign (unshare_expr (var4),
10355 unshare_expr (var2), &before);
10356 gimplify_assign (var2, x, &before);
10357 if (lane0 == NULL_TREE)
10358 gimplify_assign (val, var4, &before);
10362 if (octx->scan_exclusive && !input_phase && lane0)
10364 tree vexpr = unshare_expr (var4);
10365 TREE_OPERAND (vexpr, 1) = lane0;
10366 if (new_vard != new_var)
10367 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
10368 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10372 if (is_simd && !is_for_simd)
10374 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
10375 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
10376 gsi_replace (gsi_p, gimple_build_nop (), true);
10377 return;
10379 lower_omp (gimple_omp_body_ptr (stmt), octx);
10380 if (before)
10382 gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (stmt));
10383 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
10388 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10389 substitution of a couple of function calls. But in the NAMED case,
10390 requires that languages coordinate a symbol name. It is therefore
10391 best put here in common code. */
10393 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
10395 static void
10396 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10398 tree block;
10399 tree name, lock, unlock;
10400 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10401 gbind *bind;
10402 location_t loc = gimple_location (stmt);
10403 gimple_seq tbody;
10405 name = gimple_omp_critical_name (stmt);
10406 if (name)
10408 tree decl;
10410 if (!critical_name_mutexes)
10411 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10413 tree *n = critical_name_mutexes->get (name);
10414 if (n == NULL)
10416 char *new_str;
10418 decl = create_tmp_var_raw (ptr_type_node);
10420 new_str = ACONCAT ((".gomp_critical_user_",
10421 IDENTIFIER_POINTER (name), NULL));
10422 DECL_NAME (decl) = get_identifier (new_str);
10423 TREE_PUBLIC (decl) = 1;
10424 TREE_STATIC (decl) = 1;
10425 DECL_COMMON (decl) = 1;
10426 DECL_ARTIFICIAL (decl) = 1;
10427 DECL_IGNORED_P (decl) = 1;
10429 varpool_node::finalize_decl (decl);
10431 critical_name_mutexes->put (name, decl);
10433 else
10434 decl = *n;
10436 /* If '#pragma omp critical' is inside offloaded region or
10437 inside function marked as offloadable, the symbol must be
10438 marked as offloadable too. */
10439 omp_context *octx;
10440 if (cgraph_node::get (current_function_decl)->offloadable)
10441 varpool_node::get_create (decl)->offloadable = 1;
10442 else
10443 for (octx = ctx->outer; octx; octx = octx->outer)
10444 if (is_gimple_omp_offloaded (octx->stmt))
10446 varpool_node::get_create (decl)->offloadable = 1;
10447 break;
10450 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10451 lock = build_call_expr_loc (loc, lock, 1,
10452 build_fold_addr_expr_loc (loc, decl));
10454 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10455 unlock = build_call_expr_loc (loc, unlock, 1,
10456 build_fold_addr_expr_loc (loc, decl));
10458 else
10460 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10461 lock = build_call_expr_loc (loc, lock, 0);
10463 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10464 unlock = build_call_expr_loc (loc, unlock, 0);
10467 push_gimplify_context ();
10469 block = make_node (BLOCK);
10470 bind = gimple_build_bind (NULL, NULL, block);
10471 gsi_replace (gsi_p, bind, true);
10472 gimple_bind_add_stmt (bind, stmt);
10474 tbody = gimple_bind_body (bind);
10475 gimplify_and_add (lock, &tbody);
10476 gimple_bind_set_body (bind, tbody);
10478 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10479 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10480 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10481 gimple_omp_set_body (stmt, NULL);
10483 tbody = gimple_bind_body (bind);
10484 gimplify_and_add (unlock, &tbody);
10485 gimple_bind_set_body (bind, tbody);
10487 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10489 pop_gimplify_context (bind);
10490 gimple_bind_append_vars (bind, ctx->block_vars);
10491 BLOCK_VARS (block) = gimple_bind_vars (bind);
10494 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10495 for a lastprivate clause. Given a loop control predicate of (V
10496 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10497 is appended to *DLIST, iterator initialization is appended to
10498 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10499 to be emitted in a critical section. */
10501 static void
10502 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10503 gimple_seq *dlist, gimple_seq *clist,
10504 struct omp_context *ctx)
10506 tree clauses, cond, vinit;
10507 enum tree_code cond_code;
10508 gimple_seq stmts;
10510 cond_code = fd->loop.cond_code;
10511 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10513 /* When possible, use a strict equality expression. This can let VRP
10514 type optimizations deduce the value and remove a copy. */
10515 if (tree_fits_shwi_p (fd->loop.step))
10517 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10518 if (step == 1 || step == -1)
10519 cond_code = EQ_EXPR;
10522 tree n2 = fd->loop.n2;
10523 if (fd->collapse > 1
10524 && TREE_CODE (n2) != INTEGER_CST
10525 && gimple_omp_for_combined_into_p (fd->for_stmt))
10527 struct omp_context *taskreg_ctx = NULL;
10528 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10530 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10531 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10532 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10534 if (gimple_omp_for_combined_into_p (gfor))
10536 gcc_assert (ctx->outer->outer
10537 && is_parallel_ctx (ctx->outer->outer));
10538 taskreg_ctx = ctx->outer->outer;
10540 else
10542 struct omp_for_data outer_fd;
10543 omp_extract_for_data (gfor, &outer_fd, NULL);
10544 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10547 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10548 taskreg_ctx = ctx->outer->outer;
10550 else if (is_taskreg_ctx (ctx->outer))
10551 taskreg_ctx = ctx->outer;
10552 if (taskreg_ctx)
10554 int i;
10555 tree taskreg_clauses
10556 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10557 tree innerc = omp_find_clause (taskreg_clauses,
10558 OMP_CLAUSE__LOOPTEMP_);
10559 gcc_assert (innerc);
10560 int count = fd->collapse;
10561 if (fd->non_rect
10562 && fd->last_nonrect == fd->first_nonrect + 1)
10563 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10564 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10565 count += 4;
10566 for (i = 0; i < count; i++)
10568 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10569 OMP_CLAUSE__LOOPTEMP_);
10570 gcc_assert (innerc);
10572 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10573 OMP_CLAUSE__LOOPTEMP_);
10574 if (innerc)
10575 n2 = fold_convert (TREE_TYPE (n2),
10576 lookup_decl (OMP_CLAUSE_DECL (innerc),
10577 taskreg_ctx));
10580 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10582 clauses = gimple_omp_for_clauses (fd->for_stmt);
10583 stmts = NULL;
10584 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
10585 if (!gimple_seq_empty_p (stmts))
10587 gimple_seq_add_seq (&stmts, *dlist);
10588 *dlist = stmts;
10590 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10591 vinit = fd->loop.n1;
10592 if (cond_code == EQ_EXPR
10593 && tree_fits_shwi_p (fd->loop.n2)
10594 && ! integer_zerop (fd->loop.n2))
10595 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10596 else
10597 vinit = unshare_expr (vinit);
10599 /* Initialize the iterator variable, so that threads that don't execute
10600 any iterations don't execute the lastprivate clauses by accident. */
10601 gimplify_assign (fd->loop.v, vinit, body_p);
10605 /* OpenACC privatization.
10607 Or, in other words, *sharing* at the respective OpenACC level of
10608 parallelism.
10610 From a correctness perspective, a non-addressable variable can't be accessed
10611 outside the current thread, so it can go in a (faster than shared memory)
10612 register -- though that register may need to be broadcast in some
10613 circumstances. A variable can only meaningfully be "shared" across workers
10614 or vector lanes if its address is taken, e.g. by a call to an atomic
10615 builtin.
10617 From an optimisation perspective, the answer might be fuzzier: maybe
10618 sometimes, using shared memory directly would be faster than
10619 broadcasting. */
10621 static void
10622 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10623 const location_t loc, const tree c,
10624 const tree decl)
10626 const dump_user_location_t d_u_loc
10627 = dump_user_location_t::from_location_t (loc);
10628 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10629 #if __GNUC__ >= 10
10630 # pragma GCC diagnostic push
10631 # pragma GCC diagnostic ignored "-Wformat"
10632 #endif
10633 dump_printf_loc (l_dump_flags, d_u_loc,
10634 "variable %<%T%> ", decl);
10635 #if __GNUC__ >= 10
10636 # pragma GCC diagnostic pop
10637 #endif
10638 if (c)
10639 dump_printf (l_dump_flags,
10640 "in %qs clause ",
10641 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10642 else
10643 dump_printf (l_dump_flags,
10644 "declared in block ");
10647 static bool
10648 oacc_privatization_candidate_p (const location_t loc, const tree c,
10649 const tree decl)
10651 dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10653 /* There is some differentiation depending on block vs. clause. */
10654 bool block = !c;
10656 bool res = true;
10658 if (res && !VAR_P (decl))
10660 /* A PARM_DECL (appearing in a 'private' clause) is expected to have been
10661 privatized into a new VAR_DECL. */
10662 gcc_checking_assert (TREE_CODE (decl) != PARM_DECL);
10664 res = false;
10666 if (dump_enabled_p ())
10668 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10669 dump_printf (l_dump_flags,
10670 "potentially has improper OpenACC privatization level: %qs\n",
10671 get_tree_code_name (TREE_CODE (decl)));
10675 if (res && block && TREE_STATIC (decl))
10677 res = false;
10679 if (dump_enabled_p ())
10681 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10682 dump_printf (l_dump_flags,
10683 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10684 "static");
10688 if (res && block && DECL_EXTERNAL (decl))
10690 res = false;
10692 if (dump_enabled_p ())
10694 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10695 dump_printf (l_dump_flags,
10696 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10697 "external");
10701 if (res && !TREE_ADDRESSABLE (decl))
10703 res = false;
10705 if (dump_enabled_p ())
10707 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10708 dump_printf (l_dump_flags,
10709 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10710 "not addressable");
10714 /* If an artificial variable has been added to a bind, e.g.
10715 a compiler-generated temporary structure used by the Fortran front-end, do
10716 not consider it as a privatization candidate. Note that variables on
10717 the stack are private per-thread by default: making them "gang-private"
10718 for OpenACC actually means to share a single instance of a variable
10719 amongst all workers and threads spawned within each gang.
10720 At present, no compiler-generated artificial variables require such
10721 sharing semantics, so this is safe. */
10723 if (res && block && DECL_ARTIFICIAL (decl))
10725 res = false;
10727 if (dump_enabled_p ())
10729 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10730 dump_printf (l_dump_flags,
10731 "isn%'t candidate for adjusting OpenACC privatization "
10732 "level: %s\n", "artificial");
10736 if (res)
10738 if (dump_enabled_p ())
10740 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10741 dump_printf (l_dump_flags,
10742 "is candidate for adjusting OpenACC privatization level\n");
10746 if (dump_file && (dump_flags & TDF_DETAILS))
10748 print_generic_decl (dump_file, decl, dump_flags);
10749 fprintf (dump_file, "\n");
10752 return res;
10755 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10756 CTX. */
10758 static void
10759 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10761 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10762 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10764 tree decl = OMP_CLAUSE_DECL (c);
10766 tree new_decl = lookup_decl (decl, ctx);
10768 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c,
10769 new_decl))
10770 continue;
10772 gcc_checking_assert
10773 (!ctx->oacc_privatization_candidates.contains (new_decl));
10774 ctx->oacc_privatization_candidates.safe_push (new_decl);
10778 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10779 CTX. */
10781 static void
10782 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10784 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10786 tree new_decl = lookup_decl (decl, ctx);
10787 gcc_checking_assert (new_decl == decl);
10789 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL,
10790 new_decl))
10791 continue;
10793 gcc_checking_assert
10794 (!ctx->oacc_privatization_candidates.contains (new_decl));
10795 ctx->oacc_privatization_candidates.safe_push (new_decl);
10799 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10801 static tree
10802 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10803 struct walk_stmt_info *wi)
10805 gimple *stmt = gsi_stmt (*gsi_p);
10807 *handled_ops_p = true;
10808 switch (gimple_code (stmt))
10810 WALK_SUBSTMTS;
10812 case GIMPLE_OMP_FOR:
10813 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10814 && gimple_omp_for_combined_into_p (stmt))
10815 *handled_ops_p = false;
10816 break;
10818 case GIMPLE_OMP_SCAN:
10819 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10820 return integer_zero_node;
10821 default:
10822 break;
10824 return NULL;
10827 /* Helper function for lower_omp_for, add transformations for a worksharing
10828 loop with scan directives inside of it.
10829 For worksharing loop not combined with simd, transform:
10830 #pragma omp for reduction(inscan,+:r) private(i)
10831 for (i = 0; i < n; i = i + 1)
10834 update (r);
10836 #pragma omp scan inclusive(r)
10838 use (r);
10842 into two worksharing loops + code to merge results:
10844 num_threads = omp_get_num_threads ();
10845 thread_num = omp_get_thread_num ();
10846 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10847 <D.2099>:
10848 var2 = r;
10849 goto <D.2101>;
10850 <D.2100>:
10851 // For UDRs this is UDR init, or if ctors are needed, copy from
10852 // var3 that has been constructed to contain the neutral element.
10853 var2 = 0;
10854 <D.2101>:
10855 ivar = 0;
10856 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10857 // a shared array with num_threads elements and rprivb to a local array
10858 // number of elements equal to the number of (contiguous) iterations the
10859 // current thread will perform. controlb and controlp variables are
10860 // temporaries to handle deallocation of rprivb at the end of second
10861 // GOMP_FOR.
10862 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10863 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10864 for (i = 0; i < n; i = i + 1)
10867 // For UDRs this is UDR init or copy from var3.
10868 r = 0;
10869 // This is the input phase from user code.
10870 update (r);
10873 // For UDRs this is UDR merge.
10874 var2 = var2 + r;
10875 // Rather than handing it over to the user, save to local thread's
10876 // array.
10877 rprivb[ivar] = var2;
10878 // For exclusive scan, the above two statements are swapped.
10879 ivar = ivar + 1;
10882 // And remember the final value from this thread's into the shared
10883 // rpriva array.
10884 rpriva[(sizetype) thread_num] = var2;
10885 // If more than one thread, compute using Work-Efficient prefix sum
10886 // the inclusive parallel scan of the rpriva array.
10887 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10888 <D.2102>:
10889 GOMP_barrier ();
10890 down = 0;
10891 k = 1;
10892 num_threadsu = (unsigned int) num_threads;
10893 thread_numup1 = (unsigned int) thread_num + 1;
10894 <D.2108>:
10895 twok = k << 1;
10896 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10897 <D.2110>:
10898 down = 4294967295;
10899 k = k >> 1;
10900 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10901 <D.2112>:
10902 k = k >> 1;
10903 <D.2111>:
10904 twok = k << 1;
10905 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10906 mul = REALPART_EXPR <cplx>;
10907 ovf = IMAGPART_EXPR <cplx>;
10908 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10909 <D.2116>:
10910 andv = k & down;
10911 andvm1 = andv + 4294967295;
10912 l = mul + andvm1;
10913 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10914 <D.2120>:
10915 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10916 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10917 rpriva[l] = rpriva[l - k] + rpriva[l];
10918 <D.2117>:
10919 if (down == 0) goto <D.2121>; else goto <D.2122>;
10920 <D.2121>:
10921 k = k << 1;
10922 goto <D.2123>;
10923 <D.2122>:
10924 k = k >> 1;
10925 <D.2123>:
10926 GOMP_barrier ();
10927 if (k != 0) goto <D.2108>; else goto <D.2103>;
10928 <D.2103>:
10929 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10930 <D.2124>:
10931 // For UDRs this is UDR init or copy from var3.
10932 var2 = 0;
10933 goto <D.2126>;
10934 <D.2125>:
10935 var2 = rpriva[thread_num - 1];
10936 <D.2126>:
10937 ivar = 0;
10938 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10939 reduction(inscan,+:r) private(i)
10940 for (i = 0; i < n; i = i + 1)
10943 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10944 r = var2 + rprivb[ivar];
10947 // This is the scan phase from user code.
10948 use (r);
10949 // Plus a bump of the iterator.
10950 ivar = ivar + 1;
10952 } */
10954 static void
10955 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10956 struct omp_for_data *fd, omp_context *ctx)
10958 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10959 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10961 gimple_seq body = gimple_omp_body (stmt);
10962 gimple_stmt_iterator input1_gsi = gsi_none ();
10963 struct walk_stmt_info wi;
10964 memset (&wi, 0, sizeof (wi));
10965 wi.val_only = true;
10966 wi.info = (void *) &input1_gsi;
10967 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10968 gcc_assert (!gsi_end_p (input1_gsi));
10970 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10971 gimple_stmt_iterator gsi = input1_gsi;
10972 gsi_next (&gsi);
10973 gimple_stmt_iterator scan1_gsi = gsi;
10974 gimple *scan_stmt1 = gsi_stmt (gsi);
10975 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10977 gimple_seq input_body = gimple_omp_body (input_stmt1);
10978 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10979 gimple_omp_set_body (input_stmt1, NULL);
10980 gimple_omp_set_body (scan_stmt1, NULL);
10981 gimple_omp_set_body (stmt, NULL);
10983 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10984 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10985 gimple_omp_set_body (stmt, body);
10986 gimple_omp_set_body (input_stmt1, input_body);
10988 gimple_stmt_iterator input2_gsi = gsi_none ();
10989 memset (&wi, 0, sizeof (wi));
10990 wi.val_only = true;
10991 wi.info = (void *) &input2_gsi;
10992 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10993 gcc_assert (!gsi_end_p (input2_gsi));
10995 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10996 gsi = input2_gsi;
10997 gsi_next (&gsi);
10998 gimple_stmt_iterator scan2_gsi = gsi;
10999 gimple *scan_stmt2 = gsi_stmt (gsi);
11000 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
11001 gimple_omp_set_body (scan_stmt2, scan_body);
11003 gimple_stmt_iterator input3_gsi = gsi_none ();
11004 gimple_stmt_iterator scan3_gsi = gsi_none ();
11005 gimple_stmt_iterator input4_gsi = gsi_none ();
11006 gimple_stmt_iterator scan4_gsi = gsi_none ();
11007 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
11008 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
11009 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
11010 if (is_for_simd)
11012 memset (&wi, 0, sizeof (wi));
11013 wi.val_only = true;
11014 wi.info = (void *) &input3_gsi;
11015 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
11016 gcc_assert (!gsi_end_p (input3_gsi));
11018 input_stmt3 = gsi_stmt (input3_gsi);
11019 gsi = input3_gsi;
11020 gsi_next (&gsi);
11021 scan3_gsi = gsi;
11022 scan_stmt3 = gsi_stmt (gsi);
11023 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
11025 memset (&wi, 0, sizeof (wi));
11026 wi.val_only = true;
11027 wi.info = (void *) &input4_gsi;
11028 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
11029 gcc_assert (!gsi_end_p (input4_gsi));
11031 input_stmt4 = gsi_stmt (input4_gsi);
11032 gsi = input4_gsi;
11033 gsi_next (&gsi);
11034 scan4_gsi = gsi;
11035 scan_stmt4 = gsi_stmt (gsi);
11036 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
11038 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
11039 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
11042 tree num_threads = create_tmp_var (integer_type_node);
11043 tree thread_num = create_tmp_var (integer_type_node);
11044 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
11045 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
11046 gimple *g = gimple_build_call (nthreads_decl, 0);
11047 gimple_call_set_lhs (g, num_threads);
11048 gimple_seq_add_stmt (body_p, g);
11049 g = gimple_build_call (threadnum_decl, 0);
11050 gimple_call_set_lhs (g, thread_num);
11051 gimple_seq_add_stmt (body_p, g);
11053 tree ivar = create_tmp_var (sizetype);
11054 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
11055 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
11056 tree k = create_tmp_var (unsigned_type_node);
11057 tree l = create_tmp_var (unsigned_type_node);
11059 gimple_seq clist = NULL, mdlist = NULL;
11060 gimple_seq thr01_list = NULL, thrn1_list = NULL;
11061 gimple_seq thr02_list = NULL, thrn2_list = NULL;
11062 gimple_seq scan1_list = NULL, input2_list = NULL;
11063 gimple_seq last_list = NULL, reduc_list = NULL;
11064 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11065 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11066 && OMP_CLAUSE_REDUCTION_INSCAN (c))
11068 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11069 tree var = OMP_CLAUSE_DECL (c);
11070 tree new_var = lookup_decl (var, ctx);
11071 tree var3 = NULL_TREE;
11072 tree new_vard = new_var;
11073 if (omp_privatize_by_reference (var))
11074 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
11075 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11077 var3 = maybe_lookup_decl (new_vard, ctx);
11078 if (var3 == new_vard)
11079 var3 = NULL_TREE;
11082 tree ptype = build_pointer_type (TREE_TYPE (new_var));
11083 tree rpriva = create_tmp_var (ptype);
11084 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
11085 OMP_CLAUSE_DECL (nc) = rpriva;
11086 *cp1 = nc;
11087 cp1 = &OMP_CLAUSE_CHAIN (nc);
11089 tree rprivb = create_tmp_var (ptype);
11090 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
11091 OMP_CLAUSE_DECL (nc) = rprivb;
11092 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
11093 *cp1 = nc;
11094 cp1 = &OMP_CLAUSE_CHAIN (nc);
11096 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
11097 if (new_vard != new_var)
11098 TREE_ADDRESSABLE (var2) = 1;
11099 gimple_add_tmp_var (var2);
11101 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
11102 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11103 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11104 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11105 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
11107 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
11108 thread_num, integer_minus_one_node);
11109 x = fold_convert_loc (clause_loc, sizetype, x);
11110 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11111 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11112 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11113 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
11115 x = fold_convert_loc (clause_loc, sizetype, l);
11116 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11117 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11118 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11119 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
11121 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
11122 x = fold_convert_loc (clause_loc, sizetype, x);
11123 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11124 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11125 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11126 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
11128 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
11129 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11130 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
11131 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
11133 tree var4 = is_for_simd ? new_var : var2;
11134 tree var5 = NULL_TREE, var6 = NULL_TREE;
11135 if (is_for_simd)
11137 var5 = lookup_decl (var, input_simd_ctx);
11138 var6 = lookup_decl (var, scan_simd_ctx);
11139 if (new_vard != new_var)
11141 var5 = build_simple_mem_ref_loc (clause_loc, var5);
11142 var6 = build_simple_mem_ref_loc (clause_loc, var6);
11145 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11147 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
11148 tree val = var2;
11150 x = lang_hooks.decls.omp_clause_default_ctor
11151 (c, var2, build_outer_var_ref (var, ctx));
11152 if (x)
11153 gimplify_and_add (x, &clist);
11155 x = build_outer_var_ref (var, ctx);
11156 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
11158 gimplify_and_add (x, &thr01_list);
11160 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
11161 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
11162 if (var3)
11164 x = unshare_expr (var4);
11165 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11166 gimplify_and_add (x, &thrn1_list);
11167 x = unshare_expr (var4);
11168 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11169 gimplify_and_add (x, &thr02_list);
11171 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
11173 /* Otherwise, assign to it the identity element. */
11174 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11175 tseq = copy_gimple_seq_and_replace_locals (tseq);
11176 if (!is_for_simd)
11178 if (new_vard != new_var)
11179 val = build_fold_addr_expr_loc (clause_loc, val);
11180 SET_DECL_VALUE_EXPR (new_vard, val);
11181 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11183 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
11184 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11185 lower_omp (&tseq, ctx);
11186 gimple_seq_add_seq (&thrn1_list, tseq);
11187 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11188 lower_omp (&tseq, ctx);
11189 gimple_seq_add_seq (&thr02_list, tseq);
11190 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11191 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11192 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11193 if (y)
11194 SET_DECL_VALUE_EXPR (new_vard, y);
11195 else
11197 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11198 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11202 x = unshare_expr (var4);
11203 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
11204 gimplify_and_add (x, &thrn2_list);
11206 if (is_for_simd)
11208 x = unshare_expr (rprivb_ref);
11209 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
11210 gimplify_and_add (x, &scan1_list);
11212 else
11214 if (ctx->scan_exclusive)
11216 x = unshare_expr (rprivb_ref);
11217 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11218 gimplify_and_add (x, &scan1_list);
11221 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11222 tseq = copy_gimple_seq_and_replace_locals (tseq);
11223 SET_DECL_VALUE_EXPR (placeholder, var2);
11224 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11225 lower_omp (&tseq, ctx);
11226 gimple_seq_add_seq (&scan1_list, tseq);
11228 if (ctx->scan_inclusive)
11230 x = unshare_expr (rprivb_ref);
11231 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11232 gimplify_and_add (x, &scan1_list);
11236 x = unshare_expr (rpriva_ref);
11237 x = lang_hooks.decls.omp_clause_assign_op (c, x,
11238 unshare_expr (var4));
11239 gimplify_and_add (x, &mdlist);
11241 x = unshare_expr (is_for_simd ? var6 : new_var);
11242 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
11243 gimplify_and_add (x, &input2_list);
11245 val = rprivb_ref;
11246 if (new_vard != new_var)
11247 val = build_fold_addr_expr_loc (clause_loc, val);
11249 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11250 tseq = copy_gimple_seq_and_replace_locals (tseq);
11251 SET_DECL_VALUE_EXPR (new_vard, val);
11252 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11253 if (is_for_simd)
11255 SET_DECL_VALUE_EXPR (placeholder, var6);
11256 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11258 else
11259 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11260 lower_omp (&tseq, ctx);
11261 if (y)
11262 SET_DECL_VALUE_EXPR (new_vard, y);
11263 else
11265 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11266 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11268 if (!is_for_simd)
11270 SET_DECL_VALUE_EXPR (placeholder, new_var);
11271 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11272 lower_omp (&tseq, ctx);
11274 gimple_seq_add_seq (&input2_list, tseq);
11276 x = build_outer_var_ref (var, ctx);
11277 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
11278 gimplify_and_add (x, &last_list);
11280 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
11281 gimplify_and_add (x, &reduc_list);
11282 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11283 tseq = copy_gimple_seq_and_replace_locals (tseq);
11284 val = rprival_ref;
11285 if (new_vard != new_var)
11286 val = build_fold_addr_expr_loc (clause_loc, val);
11287 SET_DECL_VALUE_EXPR (new_vard, val);
11288 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11289 SET_DECL_VALUE_EXPR (placeholder, var2);
11290 lower_omp (&tseq, ctx);
11291 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11292 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11293 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11294 if (y)
11295 SET_DECL_VALUE_EXPR (new_vard, y);
11296 else
11298 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11299 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11301 gimple_seq_add_seq (&reduc_list, tseq);
11302 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
11303 gimplify_and_add (x, &reduc_list);
11305 x = lang_hooks.decls.omp_clause_dtor (c, var2);
11306 if (x)
11307 gimplify_and_add (x, dlist);
11309 else
11311 x = build_outer_var_ref (var, ctx);
11312 gimplify_assign (unshare_expr (var4), x, &thr01_list);
11314 x = omp_reduction_init (c, TREE_TYPE (new_var));
11315 gimplify_assign (unshare_expr (var4), unshare_expr (x),
11316 &thrn1_list);
11317 gimplify_assign (unshare_expr (var4), x, &thr02_list);
11319 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
11321 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
11322 if (code == MINUS_EXPR)
11323 code = PLUS_EXPR;
11325 if (is_for_simd)
11326 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
11327 else
11329 if (ctx->scan_exclusive)
11330 gimplify_assign (unshare_expr (rprivb_ref), var2,
11331 &scan1_list);
11332 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
11333 gimplify_assign (var2, x, &scan1_list);
11334 if (ctx->scan_inclusive)
11335 gimplify_assign (unshare_expr (rprivb_ref), var2,
11336 &scan1_list);
11339 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
11340 &mdlist);
11342 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
11343 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
11345 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
11346 &last_list);
11348 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
11349 unshare_expr (rprival_ref));
11350 gimplify_assign (rprival_ref, x, &reduc_list);
11354 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11355 gimple_seq_add_stmt (&scan1_list, g);
11356 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11357 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11358 ? scan_stmt4 : scan_stmt2), g);
11360 tree controlb = create_tmp_var (boolean_type_node);
11361 tree controlp = create_tmp_var (ptr_type_node);
11362 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11363 OMP_CLAUSE_DECL (nc) = controlb;
11364 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11365 *cp1 = nc;
11366 cp1 = &OMP_CLAUSE_CHAIN (nc);
11367 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11368 OMP_CLAUSE_DECL (nc) = controlp;
11369 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11370 *cp1 = nc;
11371 cp1 = &OMP_CLAUSE_CHAIN (nc);
11372 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11373 OMP_CLAUSE_DECL (nc) = controlb;
11374 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11375 *cp2 = nc;
11376 cp2 = &OMP_CLAUSE_CHAIN (nc);
11377 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11378 OMP_CLAUSE_DECL (nc) = controlp;
11379 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11380 *cp2 = nc;
11381 cp2 = &OMP_CLAUSE_CHAIN (nc);
11383 *cp1 = gimple_omp_for_clauses (stmt);
11384 gimple_omp_for_set_clauses (stmt, new_clauses1);
11385 *cp2 = gimple_omp_for_clauses (new_stmt);
11386 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
11388 if (is_for_simd)
11390 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
11391 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
11393 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
11394 GSI_SAME_STMT);
11395 gsi_remove (&input3_gsi, true);
11396 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11397 GSI_SAME_STMT);
11398 gsi_remove (&scan3_gsi, true);
11399 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11400 GSI_SAME_STMT);
11401 gsi_remove (&input4_gsi, true);
11402 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11403 GSI_SAME_STMT);
11404 gsi_remove (&scan4_gsi, true);
11406 else
11408 gimple_omp_set_body (scan_stmt1, scan1_list);
11409 gimple_omp_set_body (input_stmt2, input2_list);
11412 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11413 GSI_SAME_STMT);
11414 gsi_remove (&input1_gsi, true);
11415 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11416 GSI_SAME_STMT);
11417 gsi_remove (&scan1_gsi, true);
11418 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11419 GSI_SAME_STMT);
11420 gsi_remove (&input2_gsi, true);
11421 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11422 GSI_SAME_STMT);
11423 gsi_remove (&scan2_gsi, true);
11425 gimple_seq_add_seq (body_p, clist);
11427 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11428 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11429 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11430 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11431 gimple_seq_add_stmt (body_p, g);
11432 g = gimple_build_label (lab1);
11433 gimple_seq_add_stmt (body_p, g);
11434 gimple_seq_add_seq (body_p, thr01_list);
11435 g = gimple_build_goto (lab3);
11436 gimple_seq_add_stmt (body_p, g);
11437 g = gimple_build_label (lab2);
11438 gimple_seq_add_stmt (body_p, g);
11439 gimple_seq_add_seq (body_p, thrn1_list);
11440 g = gimple_build_label (lab3);
11441 gimple_seq_add_stmt (body_p, g);
11443 g = gimple_build_assign (ivar, size_zero_node);
11444 gimple_seq_add_stmt (body_p, g);
11446 gimple_seq_add_stmt (body_p, stmt);
11447 gimple_seq_add_seq (body_p, body);
11448 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11449 fd->loop.v));
11451 g = gimple_build_omp_return (true);
11452 gimple_seq_add_stmt (body_p, g);
11453 gimple_seq_add_seq (body_p, mdlist);
11455 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11456 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11457 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11458 gimple_seq_add_stmt (body_p, g);
11459 g = gimple_build_label (lab1);
11460 gimple_seq_add_stmt (body_p, g);
11462 g = omp_build_barrier (NULL);
11463 gimple_seq_add_stmt (body_p, g);
11465 tree down = create_tmp_var (unsigned_type_node);
11466 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11467 gimple_seq_add_stmt (body_p, g);
11469 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11470 gimple_seq_add_stmt (body_p, g);
11472 tree num_threadsu = create_tmp_var (unsigned_type_node);
11473 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11474 gimple_seq_add_stmt (body_p, g);
11476 tree thread_numu = create_tmp_var (unsigned_type_node);
11477 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11478 gimple_seq_add_stmt (body_p, g);
11480 tree thread_nump1 = create_tmp_var (unsigned_type_node);
11481 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11482 build_int_cst (unsigned_type_node, 1));
11483 gimple_seq_add_stmt (body_p, g);
11485 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11486 g = gimple_build_label (lab3);
11487 gimple_seq_add_stmt (body_p, g);
11489 tree twok = create_tmp_var (unsigned_type_node);
11490 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11491 gimple_seq_add_stmt (body_p, g);
11493 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11494 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11495 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11496 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11497 gimple_seq_add_stmt (body_p, g);
11498 g = gimple_build_label (lab4);
11499 gimple_seq_add_stmt (body_p, g);
11500 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11501 gimple_seq_add_stmt (body_p, g);
11502 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11503 gimple_seq_add_stmt (body_p, g);
11505 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11506 gimple_seq_add_stmt (body_p, g);
11507 g = gimple_build_label (lab6);
11508 gimple_seq_add_stmt (body_p, g);
11510 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11511 gimple_seq_add_stmt (body_p, g);
11513 g = gimple_build_label (lab5);
11514 gimple_seq_add_stmt (body_p, g);
11516 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11517 gimple_seq_add_stmt (body_p, g);
11519 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11520 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11521 gimple_call_set_lhs (g, cplx);
11522 gimple_seq_add_stmt (body_p, g);
11523 tree mul = create_tmp_var (unsigned_type_node);
11524 g = gimple_build_assign (mul, REALPART_EXPR,
11525 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11526 gimple_seq_add_stmt (body_p, g);
11527 tree ovf = create_tmp_var (unsigned_type_node);
11528 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11529 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11530 gimple_seq_add_stmt (body_p, g);
11532 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11533 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11534 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11535 lab7, lab8);
11536 gimple_seq_add_stmt (body_p, g);
11537 g = gimple_build_label (lab7);
11538 gimple_seq_add_stmt (body_p, g);
11540 tree andv = create_tmp_var (unsigned_type_node);
11541 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11542 gimple_seq_add_stmt (body_p, g);
11543 tree andvm1 = create_tmp_var (unsigned_type_node);
11544 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11545 build_minus_one_cst (unsigned_type_node));
11546 gimple_seq_add_stmt (body_p, g);
11548 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11549 gimple_seq_add_stmt (body_p, g);
11551 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11552 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11553 gimple_seq_add_stmt (body_p, g);
11554 g = gimple_build_label (lab9);
11555 gimple_seq_add_stmt (body_p, g);
11556 gimple_seq_add_seq (body_p, reduc_list);
11557 g = gimple_build_label (lab8);
11558 gimple_seq_add_stmt (body_p, g);
11560 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11561 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11562 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11563 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11564 lab10, lab11);
11565 gimple_seq_add_stmt (body_p, g);
11566 g = gimple_build_label (lab10);
11567 gimple_seq_add_stmt (body_p, g);
11568 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11569 gimple_seq_add_stmt (body_p, g);
11570 g = gimple_build_goto (lab12);
11571 gimple_seq_add_stmt (body_p, g);
11572 g = gimple_build_label (lab11);
11573 gimple_seq_add_stmt (body_p, g);
11574 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11575 gimple_seq_add_stmt (body_p, g);
11576 g = gimple_build_label (lab12);
11577 gimple_seq_add_stmt (body_p, g);
11579 g = omp_build_barrier (NULL);
11580 gimple_seq_add_stmt (body_p, g);
11582 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11583 lab3, lab2);
11584 gimple_seq_add_stmt (body_p, g);
11586 g = gimple_build_label (lab2);
11587 gimple_seq_add_stmt (body_p, g);
11589 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11590 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11591 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11592 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11593 gimple_seq_add_stmt (body_p, g);
11594 g = gimple_build_label (lab1);
11595 gimple_seq_add_stmt (body_p, g);
11596 gimple_seq_add_seq (body_p, thr02_list);
11597 g = gimple_build_goto (lab3);
11598 gimple_seq_add_stmt (body_p, g);
11599 g = gimple_build_label (lab2);
11600 gimple_seq_add_stmt (body_p, g);
11601 gimple_seq_add_seq (body_p, thrn2_list);
11602 g = gimple_build_label (lab3);
11603 gimple_seq_add_stmt (body_p, g);
11605 g = gimple_build_assign (ivar, size_zero_node);
11606 gimple_seq_add_stmt (body_p, g);
11607 gimple_seq_add_stmt (body_p, new_stmt);
11608 gimple_seq_add_seq (body_p, new_body);
11610 gimple_seq new_dlist = NULL;
11611 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11612 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11613 tree num_threadsm1 = create_tmp_var (integer_type_node);
11614 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11615 integer_minus_one_node);
11616 gimple_seq_add_stmt (&new_dlist, g);
11617 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11618 gimple_seq_add_stmt (&new_dlist, g);
11619 g = gimple_build_label (lab1);
11620 gimple_seq_add_stmt (&new_dlist, g);
11621 gimple_seq_add_seq (&new_dlist, last_list);
11622 g = gimple_build_label (lab2);
11623 gimple_seq_add_stmt (&new_dlist, g);
11624 gimple_seq_add_seq (&new_dlist, *dlist);
11625 *dlist = new_dlist;
11628 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11629 the addresses of variables to be made private at the surrounding
11630 parallelism level. Such functions appear in the gimple code stream in two
11631 forms, e.g. for a partitioned loop:
11633 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11634 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11635 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11636 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11638 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11639 not as part of a HEAD_MARK sequence:
11641 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11643 For such stand-alone appearances, the 3rd argument is always 0, denoting
11644 gang partitioning. */
11646 static gcall *
11647 lower_oacc_private_marker (omp_context *ctx)
11649 if (ctx->oacc_privatization_candidates.length () == 0)
11650 return NULL;
11652 auto_vec<tree, 5> args;
11654 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11655 args.quick_push (integer_zero_node);
11656 args.quick_push (integer_minus_one_node);
11658 int i;
11659 tree decl;
11660 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11662 gcc_checking_assert (TREE_ADDRESSABLE (decl));
11663 tree addr = build_fold_addr_expr (decl);
11664 args.safe_push (addr);
11667 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
11670 /* Lower code for an OMP loop directive. */
11672 static void
11673 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11675 tree *rhs_p, block;
11676 struct omp_for_data fd, *fdp = NULL;
11677 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
11678 gbind *new_stmt;
11679 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
11680 gimple_seq cnt_list = NULL, clist = NULL;
11681 gimple_seq oacc_head = NULL, oacc_tail = NULL;
11682 size_t i;
11684 push_gimplify_context ();
11686 if (is_gimple_omp_oacc (ctx->stmt))
11687 oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));
11689 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
11691 block = make_node (BLOCK);
11692 new_stmt = gimple_build_bind (NULL, NULL, block);
11693 /* Replace at gsi right away, so that 'stmt' is no member
11694 of a sequence anymore as we're going to add to a different
11695 one below. */
11696 gsi_replace (gsi_p, new_stmt, true);
11698 /* Move declaration of temporaries in the loop body before we make
11699 it go away. */
11700 omp_for_body = gimple_omp_body (stmt);
11701 if (!gimple_seq_empty_p (omp_for_body)
11702 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
11704 gbind *inner_bind
11705 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
11706 tree vars = gimple_bind_vars (inner_bind);
11707 if (is_gimple_omp_oacc (ctx->stmt))
11708 oacc_privatization_scan_decl_chain (ctx, vars);
11709 gimple_bind_append_vars (new_stmt, vars);
11710 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11711 keep them on the inner_bind and it's block. */
11712 gimple_bind_set_vars (inner_bind, NULL_TREE);
11713 if (gimple_bind_block (inner_bind))
11714 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
11717 if (gimple_omp_for_combined_into_p (stmt))
11719 omp_extract_for_data (stmt, &fd, NULL);
11720 fdp = &fd;
11722 /* We need two temporaries with fd.loop.v type (istart/iend)
11723 and then (fd.collapse - 1) temporaries with the same
11724 type for count2 ... countN-1 vars if not constant. */
11725 size_t count = 2;
11726 tree type = fd.iter_type;
11727 if (fd.collapse > 1
11728 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11729 count += fd.collapse - 1;
11730 size_t count2 = 0;
11731 tree type2 = NULL_TREE;
11732 bool taskreg_for
11733 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
11734 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
11735 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
11736 tree simtc = NULL;
11737 tree clauses = *pc;
11738 if (fd.collapse > 1
11739 && fd.non_rect
11740 && fd.last_nonrect == fd.first_nonrect + 1
11741 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11742 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
11743 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
11745 v = gimple_omp_for_index (stmt, fd.first_nonrect);
11746 type2 = TREE_TYPE (v);
11747 count++;
11748 count2 = 3;
11750 if (taskreg_for)
11751 outerc
11752 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
11753 OMP_CLAUSE__LOOPTEMP_);
11754 if (ctx->simt_stmt)
11755 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
11756 OMP_CLAUSE__LOOPTEMP_);
11757 for (i = 0; i < count + count2; i++)
11759 tree temp;
11760 if (taskreg_for)
11762 gcc_assert (outerc);
11763 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
11764 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
11765 OMP_CLAUSE__LOOPTEMP_);
11767 else
11769 /* If there are 2 adjacent SIMD stmts, one with _simt_
11770 clause, another without, make sure they have the same
11771 decls in _looptemp_ clauses, because the outer stmt
11772 they are combined into will look up just one inner_stmt. */
11773 if (ctx->simt_stmt)
11774 temp = OMP_CLAUSE_DECL (simtc);
11775 else
11776 temp = create_tmp_var (i >= count ? type2 : type);
11777 insert_decl_map (&ctx->outer->cb, temp, temp);
11779 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
11780 OMP_CLAUSE_DECL (*pc) = temp;
11781 pc = &OMP_CLAUSE_CHAIN (*pc);
11782 if (ctx->simt_stmt)
11783 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
11784 OMP_CLAUSE__LOOPTEMP_);
11786 *pc = clauses;
11789 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11790 dlist = NULL;
11791 body = NULL;
11792 tree rclauses
11793 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
11794 OMP_CLAUSE_REDUCTION);
11795 tree rtmp = NULL_TREE;
11796 if (rclauses)
11798 tree type = build_pointer_type (pointer_sized_int_node);
11799 tree temp = create_tmp_var (type);
11800 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
11801 OMP_CLAUSE_DECL (c) = temp;
11802 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
11803 gimple_omp_for_set_clauses (stmt, c);
11804 lower_omp_task_reductions (ctx, OMP_FOR,
11805 gimple_omp_for_clauses (stmt),
11806 &tred_ilist, &tred_dlist);
11807 rclauses = c;
11808 rtmp = make_ssa_name (type);
11809 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
11812 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
11813 ctx);
11815 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
11816 fdp);
11817 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
11818 gimple_omp_for_pre_body (stmt));
11820 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11822 gcall *private_marker = NULL;
11823 if (is_gimple_omp_oacc (ctx->stmt)
11824 && !gimple_seq_empty_p (omp_for_body))
11825 private_marker = lower_oacc_private_marker (ctx);
11827 /* Lower the header expressions. At this point, we can assume that
11828 the header is of the form:
11830 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11832 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11833 using the .omp_data_s mapping, if needed. */
11834 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
11836 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
11837 if (TREE_CODE (*rhs_p) == TREE_VEC)
11839 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11840 TREE_VEC_ELT (*rhs_p, 1)
11841 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11842 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11843 TREE_VEC_ELT (*rhs_p, 2)
11844 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11846 else if (!is_gimple_min_invariant (*rhs_p))
11847 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11848 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11849 recompute_tree_invariant_for_addr_expr (*rhs_p);
11851 rhs_p = gimple_omp_for_final_ptr (stmt, i);
11852 if (TREE_CODE (*rhs_p) == TREE_VEC)
11854 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11855 TREE_VEC_ELT (*rhs_p, 1)
11856 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11857 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11858 TREE_VEC_ELT (*rhs_p, 2)
11859 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11861 else if (!is_gimple_min_invariant (*rhs_p))
11862 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11863 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11864 recompute_tree_invariant_for_addr_expr (*rhs_p);
11866 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
11867 if (!is_gimple_min_invariant (*rhs_p))
11868 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11870 if (rclauses)
11871 gimple_seq_add_seq (&tred_ilist, cnt_list);
11872 else
11873 gimple_seq_add_seq (&body, cnt_list);
11875 /* Once lowered, extract the bounds and clauses. */
11876 omp_extract_for_data (stmt, &fd, NULL);
11878 if (is_gimple_omp_oacc (ctx->stmt)
11879 && !ctx_in_oacc_kernels_region (ctx))
11880 lower_oacc_head_tail (gimple_location (stmt),
11881 gimple_omp_for_clauses (stmt), private_marker,
11882 &oacc_head, &oacc_tail, ctx);
11884 /* Add OpenACC partitioning and reduction markers just before the loop. */
11885 if (oacc_head)
11886 gimple_seq_add_seq (&body, oacc_head);
11888 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
11890 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11891 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11892 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11893 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11895 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
11896 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
11897 OMP_CLAUSE_LINEAR_STEP (c)
11898 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
11899 ctx);
11902 if ((ctx->scan_inclusive || ctx->scan_exclusive)
11903 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11904 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
11905 else
11907 gimple_seq_add_stmt (&body, stmt);
11908 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
11911 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
11912 fd.loop.v));
11914 /* After the loop, add exit clauses. */
11915 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
11917 if (clist)
11919 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
11920 gcall *g = gimple_build_call (fndecl, 0);
11921 gimple_seq_add_stmt (&body, g);
11922 gimple_seq_add_seq (&body, clist);
11923 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
11924 g = gimple_build_call (fndecl, 0);
11925 gimple_seq_add_stmt (&body, g);
11928 if (ctx->cancellable)
11929 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
11931 gimple_seq_add_seq (&body, dlist);
11933 if (rclauses)
11935 gimple_seq_add_seq (&tred_ilist, body);
11936 body = tred_ilist;
11939 body = maybe_catch_exception (body);
11941 /* Region exit marker goes at the end of the loop body. */
11942 gimple *g = gimple_build_omp_return (fd.have_nowait);
11943 gimple_seq_add_stmt (&body, g);
11945 gimple_seq_add_seq (&body, tred_dlist);
11947 maybe_add_implicit_barrier_cancel (ctx, g, &body);
11949 if (rclauses)
11950 OMP_CLAUSE_DECL (rclauses) = rtmp;
11952 /* Add OpenACC joining and reduction markers just after the loop. */
11953 if (oacc_tail)
11954 gimple_seq_add_seq (&body, oacc_tail);
11956 pop_gimplify_context (new_stmt);
11958 gimple_bind_append_vars (new_stmt, ctx->block_vars);
11959 maybe_remove_omp_member_access_dummy_vars (new_stmt);
11960 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
11961 if (BLOCK_VARS (block))
11962 TREE_USED (block) = 1;
11964 gimple_bind_set_body (new_stmt, body);
11965 gimple_omp_set_body (stmt, NULL);
11966 gimple_omp_for_set_pre_body (stmt, NULL);
11969 /* Callback for walk_stmts. Check if the current statement only contains
11970 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11972 static tree
11973 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11974 bool *handled_ops_p,
11975 struct walk_stmt_info *wi)
11977 int *info = (int *) wi->info;
11978 gimple *stmt = gsi_stmt (*gsi_p);
11980 *handled_ops_p = true;
11981 switch (gimple_code (stmt))
11983 WALK_SUBSTMTS;
11985 case GIMPLE_DEBUG:
11986 break;
11987 case GIMPLE_OMP_FOR:
11988 case GIMPLE_OMP_SECTIONS:
11989 *info = *info == 0 ? 1 : -1;
11990 break;
11991 default:
11992 *info = -1;
11993 break;
11995 return NULL;
11998 struct omp_taskcopy_context
12000 /* This field must be at the beginning, as we do "inheritance": Some
12001 callback functions for tree-inline.cc (e.g., omp_copy_decl)
12002 receive a copy_body_data pointer that is up-casted to an
12003 omp_context pointer. */
12004 copy_body_data cb;
12005 omp_context *ctx;
12008 static tree
12009 task_copyfn_copy_decl (tree var, copy_body_data *cb)
12011 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
12013 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
12014 return create_tmp_var (TREE_TYPE (var));
12016 return var;
12019 static tree
12020 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
12022 tree name, new_fields = NULL, type, f;
12024 type = lang_hooks.types.make_type (RECORD_TYPE);
12025 name = DECL_NAME (TYPE_NAME (orig_type));
12026 name = build_decl (gimple_location (tcctx->ctx->stmt),
12027 TYPE_DECL, name, type);
12028 TYPE_NAME (type) = name;
12030 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
12032 tree new_f = copy_node (f);
12033 DECL_CONTEXT (new_f) = type;
12034 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
12035 TREE_CHAIN (new_f) = new_fields;
12036 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
12037 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
12038 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
12039 &tcctx->cb, NULL);
12040 new_fields = new_f;
12041 tcctx->cb.decl_map->put (f, new_f);
12043 TYPE_FIELDS (type) = nreverse (new_fields);
12044 layout_type (type);
12045 return type;
12048 /* Create task copyfn. */
12050 static void
12051 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
12053 struct function *child_cfun;
12054 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
12055 tree record_type, srecord_type, bind, list;
12056 bool record_needs_remap = false, srecord_needs_remap = false;
12057 splay_tree_node n;
12058 struct omp_taskcopy_context tcctx;
12059 location_t loc = gimple_location (task_stmt);
12060 size_t looptempno = 0;
12062 child_fn = gimple_omp_task_copy_fn (task_stmt);
12063 task_cpyfns.safe_push (task_stmt);
12064 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
12065 gcc_assert (child_cfun->cfg == NULL);
12066 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
12068 /* Reset DECL_CONTEXT on function arguments. */
12069 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
12070 DECL_CONTEXT (t) = child_fn;
12072 /* Populate the function. */
12073 push_gimplify_context ();
12074 push_cfun (child_cfun);
12076 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
12077 TREE_SIDE_EFFECTS (bind) = 1;
12078 list = NULL;
12079 DECL_SAVED_TREE (child_fn) = bind;
12080 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
12082 /* Remap src and dst argument types if needed. */
12083 record_type = ctx->record_type;
12084 srecord_type = ctx->srecord_type;
12085 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
12086 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
12088 record_needs_remap = true;
12089 break;
12091 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
12092 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
12094 srecord_needs_remap = true;
12095 break;
12098 if (record_needs_remap || srecord_needs_remap)
12100 memset (&tcctx, '\0', sizeof (tcctx));
12101 tcctx.cb.src_fn = ctx->cb.src_fn;
12102 tcctx.cb.dst_fn = child_fn;
12103 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
12104 gcc_checking_assert (tcctx.cb.src_node);
12105 tcctx.cb.dst_node = tcctx.cb.src_node;
12106 tcctx.cb.src_cfun = ctx->cb.src_cfun;
12107 tcctx.cb.copy_decl = task_copyfn_copy_decl;
12108 tcctx.cb.eh_lp_nr = 0;
12109 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
12110 tcctx.cb.decl_map = new hash_map<tree, tree>;
12111 tcctx.ctx = ctx;
12113 if (record_needs_remap)
12114 record_type = task_copyfn_remap_type (&tcctx, record_type);
12115 if (srecord_needs_remap)
12116 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
12118 else
12119 tcctx.cb.decl_map = NULL;
12121 arg = DECL_ARGUMENTS (child_fn);
12122 TREE_TYPE (arg) = build_pointer_type (record_type);
12123 sarg = DECL_CHAIN (arg);
12124 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
12126 /* First pass: initialize temporaries used in record_type and srecord_type
12127 sizes and field offsets. */
12128 if (tcctx.cb.decl_map)
12129 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12130 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12132 tree *p;
12134 decl = OMP_CLAUSE_DECL (c);
12135 p = tcctx.cb.decl_map->get (decl);
12136 if (p == NULL)
12137 continue;
12138 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12139 sf = (tree) n->value;
12140 sf = *tcctx.cb.decl_map->get (sf);
12141 src = build_simple_mem_ref_loc (loc, sarg);
12142 src = omp_build_component_ref (src, sf);
12143 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
12144 append_to_statement_list (t, &list);
12147 /* Second pass: copy shared var pointers and copy construct non-VLA
12148 firstprivate vars. */
12149 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12150 switch (OMP_CLAUSE_CODE (c))
12152 splay_tree_key key;
12153 case OMP_CLAUSE_SHARED:
12154 decl = OMP_CLAUSE_DECL (c);
12155 key = (splay_tree_key) decl;
12156 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
12157 key = (splay_tree_key) &DECL_UID (decl);
12158 n = splay_tree_lookup (ctx->field_map, key);
12159 if (n == NULL)
12160 break;
12161 f = (tree) n->value;
12162 if (tcctx.cb.decl_map)
12163 f = *tcctx.cb.decl_map->get (f);
12164 n = splay_tree_lookup (ctx->sfield_map, key);
12165 sf = (tree) n->value;
12166 if (tcctx.cb.decl_map)
12167 sf = *tcctx.cb.decl_map->get (sf);
12168 src = build_simple_mem_ref_loc (loc, sarg);
12169 src = omp_build_component_ref (src, sf);
12170 dst = build_simple_mem_ref_loc (loc, arg);
12171 dst = omp_build_component_ref (dst, f);
12172 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12173 append_to_statement_list (t, &list);
12174 break;
12175 case OMP_CLAUSE_REDUCTION:
12176 case OMP_CLAUSE_IN_REDUCTION:
12177 decl = OMP_CLAUSE_DECL (c);
12178 if (TREE_CODE (decl) == MEM_REF)
12180 decl = TREE_OPERAND (decl, 0);
12181 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12182 decl = TREE_OPERAND (decl, 0);
12183 if (TREE_CODE (decl) == INDIRECT_REF
12184 || TREE_CODE (decl) == ADDR_EXPR)
12185 decl = TREE_OPERAND (decl, 0);
12187 key = (splay_tree_key) decl;
12188 n = splay_tree_lookup (ctx->field_map, key);
12189 if (n == NULL)
12190 break;
12191 f = (tree) n->value;
12192 if (tcctx.cb.decl_map)
12193 f = *tcctx.cb.decl_map->get (f);
12194 n = splay_tree_lookup (ctx->sfield_map, key);
12195 sf = (tree) n->value;
12196 if (tcctx.cb.decl_map)
12197 sf = *tcctx.cb.decl_map->get (sf);
12198 src = build_simple_mem_ref_loc (loc, sarg);
12199 src = omp_build_component_ref (src, sf);
12200 if (decl != OMP_CLAUSE_DECL (c)
12201 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12202 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12203 src = build_simple_mem_ref_loc (loc, src);
12204 dst = build_simple_mem_ref_loc (loc, arg);
12205 dst = omp_build_component_ref (dst, f);
12206 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12207 append_to_statement_list (t, &list);
12208 break;
12209 case OMP_CLAUSE__LOOPTEMP_:
12210 /* Fields for first two _looptemp_ clauses are initialized by
12211 GOMP_taskloop*, the rest are handled like firstprivate. */
12212 if (looptempno < 2)
12214 looptempno++;
12215 break;
12217 /* FALLTHRU */
12218 case OMP_CLAUSE__REDUCTEMP_:
12219 case OMP_CLAUSE_FIRSTPRIVATE:
12220 decl = OMP_CLAUSE_DECL (c);
12221 if (is_variable_sized (decl))
12222 break;
12223 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12224 if (n == NULL)
12225 break;
12226 f = (tree) n->value;
12227 if (tcctx.cb.decl_map)
12228 f = *tcctx.cb.decl_map->get (f);
12229 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12230 if (n != NULL)
12232 sf = (tree) n->value;
12233 if (tcctx.cb.decl_map)
12234 sf = *tcctx.cb.decl_map->get (sf);
12235 src = build_simple_mem_ref_loc (loc, sarg);
12236 src = omp_build_component_ref (src, sf);
12237 if (use_pointer_for_field (decl, NULL)
12238 || omp_privatize_by_reference (decl))
12239 src = build_simple_mem_ref_loc (loc, src);
12241 else
12242 src = decl;
12243 dst = build_simple_mem_ref_loc (loc, arg);
12244 dst = omp_build_component_ref (dst, f);
12245 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
12246 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12247 else
12249 if (ctx->allocate_map)
12250 if (tree *allocatorp = ctx->allocate_map->get (decl))
12252 tree allocator = *allocatorp;
12253 HOST_WIDE_INT ialign = 0;
12254 if (TREE_CODE (allocator) == TREE_LIST)
12256 ialign = tree_to_uhwi (TREE_VALUE (allocator));
12257 allocator = TREE_PURPOSE (allocator);
12259 if (TREE_CODE (allocator) != INTEGER_CST)
12261 n = splay_tree_lookup (ctx->sfield_map,
12262 (splay_tree_key) allocator);
12263 allocator = (tree) n->value;
12264 if (tcctx.cb.decl_map)
12265 allocator = *tcctx.cb.decl_map->get (allocator);
12266 tree a = build_simple_mem_ref_loc (loc, sarg);
12267 allocator = omp_build_component_ref (a, allocator);
12269 allocator = fold_convert (pointer_sized_int_node, allocator);
12270 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
12271 tree align = build_int_cst (size_type_node,
12272 MAX (ialign,
12273 DECL_ALIGN_UNIT (decl)));
12274 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
12275 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
12276 allocator);
12277 ptr = fold_convert (TREE_TYPE (dst), ptr);
12278 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
12279 append_to_statement_list (t, &list);
12280 dst = build_simple_mem_ref_loc (loc, dst);
12282 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12284 append_to_statement_list (t, &list);
12285 break;
12286 case OMP_CLAUSE_PRIVATE:
12287 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
12288 break;
12289 decl = OMP_CLAUSE_DECL (c);
12290 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12291 f = (tree) n->value;
12292 if (tcctx.cb.decl_map)
12293 f = *tcctx.cb.decl_map->get (f);
12294 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12295 if (n != NULL)
12297 sf = (tree) n->value;
12298 if (tcctx.cb.decl_map)
12299 sf = *tcctx.cb.decl_map->get (sf);
12300 src = build_simple_mem_ref_loc (loc, sarg);
12301 src = omp_build_component_ref (src, sf);
12302 if (use_pointer_for_field (decl, NULL))
12303 src = build_simple_mem_ref_loc (loc, src);
12305 else
12306 src = decl;
12307 dst = build_simple_mem_ref_loc (loc, arg);
12308 dst = omp_build_component_ref (dst, f);
12309 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12310 append_to_statement_list (t, &list);
12311 break;
12312 default:
12313 break;
12316 /* Last pass: handle VLA firstprivates. */
12317 if (tcctx.cb.decl_map)
12318 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12319 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12321 tree ind, ptr, df;
12323 decl = OMP_CLAUSE_DECL (c);
12324 if (!is_variable_sized (decl))
12325 continue;
12326 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12327 if (n == NULL)
12328 continue;
12329 f = (tree) n->value;
12330 f = *tcctx.cb.decl_map->get (f);
12331 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
12332 ind = DECL_VALUE_EXPR (decl);
12333 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
12334 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
12335 n = splay_tree_lookup (ctx->sfield_map,
12336 (splay_tree_key) TREE_OPERAND (ind, 0));
12337 sf = (tree) n->value;
12338 sf = *tcctx.cb.decl_map->get (sf);
12339 src = build_simple_mem_ref_loc (loc, sarg);
12340 src = omp_build_component_ref (src, sf);
12341 src = build_simple_mem_ref_loc (loc, src);
12342 dst = build_simple_mem_ref_loc (loc, arg);
12343 dst = omp_build_component_ref (dst, f);
12344 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12345 append_to_statement_list (t, &list);
12346 n = splay_tree_lookup (ctx->field_map,
12347 (splay_tree_key) TREE_OPERAND (ind, 0));
12348 df = (tree) n->value;
12349 df = *tcctx.cb.decl_map->get (df);
12350 ptr = build_simple_mem_ref_loc (loc, arg);
12351 ptr = omp_build_component_ref (ptr, df);
12352 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
12353 build_fold_addr_expr_loc (loc, dst));
12354 append_to_statement_list (t, &list);
12357 t = build1 (RETURN_EXPR, void_type_node, NULL);
12358 append_to_statement_list (t, &list);
12360 if (tcctx.cb.decl_map)
12361 delete tcctx.cb.decl_map;
12362 pop_gimplify_context (NULL);
12363 BIND_EXPR_BODY (bind) = list;
12364 pop_cfun ();
12367 static void
12368 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
12370 tree c, clauses;
12371 gimple *g;
12372 size_t cnt[5] = { 0, 0, 0, 0, 0 }, idx = 2, i;
12374 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
12375 gcc_assert (clauses);
12376 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12377 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
12378 switch (OMP_CLAUSE_DEPEND_KIND (c))
12380 case OMP_CLAUSE_DEPEND_LAST:
12381 /* Lowering already done at gimplification. */
12382 return;
12383 case OMP_CLAUSE_DEPEND_IN:
12384 cnt[2]++;
12385 break;
12386 case OMP_CLAUSE_DEPEND_OUT:
12387 case OMP_CLAUSE_DEPEND_INOUT:
12388 cnt[0]++;
12389 break;
12390 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12391 cnt[1]++;
12392 break;
12393 case OMP_CLAUSE_DEPEND_DEPOBJ:
12394 cnt[3]++;
12395 break;
12396 case OMP_CLAUSE_DEPEND_INOUTSET:
12397 cnt[4]++;
12398 break;
12399 default:
12400 gcc_unreachable ();
12402 if (cnt[1] || cnt[3] || cnt[4])
12403 idx = 5;
12404 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3] + cnt[4];
12405 size_t inoutidx = total + idx;
12406 tree type = build_array_type_nelts (ptr_type_node, total + idx + 2 * cnt[4]);
12407 tree array = create_tmp_var (type);
12408 TREE_ADDRESSABLE (array) = 1;
12409 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12410 NULL_TREE);
12411 if (idx == 5)
12413 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12414 gimple_seq_add_stmt (iseq, g);
12415 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12416 NULL_TREE);
12418 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12419 gimple_seq_add_stmt (iseq, g);
12420 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12422 r = build4 (ARRAY_REF, ptr_type_node, array,
12423 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12424 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12425 gimple_seq_add_stmt (iseq, g);
12427 for (i = 0; i < 5; i++)
12429 if (cnt[i] == 0)
12430 continue;
12431 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12432 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12433 continue;
12434 else
12436 switch (OMP_CLAUSE_DEPEND_KIND (c))
12438 case OMP_CLAUSE_DEPEND_IN:
12439 if (i != 2)
12440 continue;
12441 break;
12442 case OMP_CLAUSE_DEPEND_OUT:
12443 case OMP_CLAUSE_DEPEND_INOUT:
12444 if (i != 0)
12445 continue;
12446 break;
12447 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12448 if (i != 1)
12449 continue;
12450 break;
12451 case OMP_CLAUSE_DEPEND_DEPOBJ:
12452 if (i != 3)
12453 continue;
12454 break;
12455 case OMP_CLAUSE_DEPEND_INOUTSET:
12456 if (i != 4)
12457 continue;
12458 break;
12459 default:
12460 gcc_unreachable ();
12462 tree t = OMP_CLAUSE_DECL (c);
12463 if (i == 4)
12465 t = build4 (ARRAY_REF, ptr_type_node, array,
12466 size_int (inoutidx), NULL_TREE, NULL_TREE);
12467 t = build_fold_addr_expr (t);
12468 inoutidx += 2;
12470 t = fold_convert (ptr_type_node, t);
12471 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12472 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12473 NULL_TREE, NULL_TREE);
12474 g = gimple_build_assign (r, t);
12475 gimple_seq_add_stmt (iseq, g);
12478 if (cnt[4])
12479 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12480 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12481 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_INOUTSET)
12483 tree t = OMP_CLAUSE_DECL (c);
12484 t = fold_convert (ptr_type_node, t);
12485 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12486 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12487 NULL_TREE, NULL_TREE);
12488 g = gimple_build_assign (r, t);
12489 gimple_seq_add_stmt (iseq, g);
12490 t = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
12491 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12492 NULL_TREE, NULL_TREE);
12493 g = gimple_build_assign (r, t);
12494 gimple_seq_add_stmt (iseq, g);
12497 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12498 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12499 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12500 OMP_CLAUSE_CHAIN (c) = *pclauses;
12501 *pclauses = c;
12502 tree clobber = build_clobber (type);
12503 g = gimple_build_assign (array, clobber);
12504 gimple_seq_add_stmt (oseq, g);
12507 /* Lower the OpenMP parallel or task directive in the current statement
12508 in GSI_P. CTX holds context information for the directive. */
12510 static void
12511 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12513 tree clauses;
12514 tree child_fn, t;
12515 gimple *stmt = gsi_stmt (*gsi_p);
12516 gbind *par_bind, *bind, *dep_bind = NULL;
12517 gimple_seq par_body;
12518 location_t loc = gimple_location (stmt);
12520 clauses = gimple_omp_taskreg_clauses (stmt);
12521 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12522 && gimple_omp_task_taskwait_p (stmt))
12524 par_bind = NULL;
12525 par_body = NULL;
12527 else
12529 par_bind
12530 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
12531 par_body = gimple_bind_body (par_bind);
12533 child_fn = ctx->cb.dst_fn;
12534 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
12535 && !gimple_omp_parallel_combined_p (stmt))
12537 struct walk_stmt_info wi;
12538 int ws_num = 0;
12540 memset (&wi, 0, sizeof (wi));
12541 wi.info = &ws_num;
12542 wi.val_only = true;
12543 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12544 if (ws_num == 1)
12545 gimple_omp_parallel_set_combined_p (stmt, true);
12547 gimple_seq dep_ilist = NULL;
12548 gimple_seq dep_olist = NULL;
12549 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12550 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12552 push_gimplify_context ();
12553 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12554 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
12555 &dep_ilist, &dep_olist);
12558 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12559 && gimple_omp_task_taskwait_p (stmt))
12561 if (dep_bind)
12563 gsi_replace (gsi_p, dep_bind, true);
12564 gimple_bind_add_seq (dep_bind, dep_ilist);
12565 gimple_bind_add_stmt (dep_bind, stmt);
12566 gimple_bind_add_seq (dep_bind, dep_olist);
12567 pop_gimplify_context (dep_bind);
12569 return;
12572 if (ctx->srecord_type)
12573 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
12575 gimple_seq tskred_ilist = NULL;
12576 gimple_seq tskred_olist = NULL;
12577 if ((is_task_ctx (ctx)
12578 && gimple_omp_task_taskloop_p (ctx->stmt)
12579 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
12580 OMP_CLAUSE_REDUCTION))
12581 || (is_parallel_ctx (ctx)
12582 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
12583 OMP_CLAUSE__REDUCTEMP_)))
12585 if (dep_bind == NULL)
12587 push_gimplify_context ();
12588 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12590 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
12591 : OMP_PARALLEL,
12592 gimple_omp_taskreg_clauses (ctx->stmt),
12593 &tskred_ilist, &tskred_olist);
12596 push_gimplify_context ();
12598 gimple_seq par_olist = NULL;
12599 gimple_seq par_ilist = NULL;
12600 gimple_seq par_rlist = NULL;
12601 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
12602 lower_omp (&par_body, ctx);
12603 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
12604 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
12606 /* Declare all the variables created by mapping and the variables
12607 declared in the scope of the parallel body. */
12608 record_vars_into (ctx->block_vars, child_fn);
12609 maybe_remove_omp_member_access_dummy_vars (par_bind);
12610 record_vars_into (gimple_bind_vars (par_bind), child_fn);
12612 if (ctx->record_type)
12614 ctx->sender_decl
12615 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12616 : ctx->record_type, ".omp_data_o");
12617 DECL_NAMELESS (ctx->sender_decl) = 1;
12618 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12619 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
12622 gimple_seq olist = NULL;
12623 gimple_seq ilist = NULL;
12624 lower_send_clauses (clauses, &ilist, &olist, ctx);
12625 lower_send_shared_vars (&ilist, &olist, ctx);
12627 if (ctx->record_type)
12629 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12630 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12631 clobber));
12634 /* Once all the expansions are done, sequence all the different
12635 fragments inside gimple_omp_body. */
12637 gimple_seq new_body = NULL;
12639 if (ctx->record_type)
12641 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12642 /* fixup_child_record_type might have changed receiver_decl's type. */
12643 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12644 gimple_seq_add_stmt (&new_body,
12645 gimple_build_assign (ctx->receiver_decl, t));
12648 gimple_seq_add_seq (&new_body, par_ilist);
12649 gimple_seq_add_seq (&new_body, par_body);
12650 gimple_seq_add_seq (&new_body, par_rlist);
12651 if (ctx->cancellable)
12652 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
12653 gimple_seq_add_seq (&new_body, par_olist);
12654 new_body = maybe_catch_exception (new_body);
12655 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
12656 gimple_seq_add_stmt (&new_body,
12657 gimple_build_omp_continue (integer_zero_node,
12658 integer_zero_node));
12659 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12660 gimple_omp_set_body (stmt, new_body);
12662 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
12663 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12664 else
12665 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
12666 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12667 gimple_bind_add_seq (bind, ilist);
12668 gimple_bind_add_stmt (bind, stmt);
12669 gimple_bind_add_seq (bind, olist);
12671 pop_gimplify_context (NULL);
12673 if (dep_bind)
12675 gimple_bind_add_seq (dep_bind, dep_ilist);
12676 gimple_bind_add_seq (dep_bind, tskred_ilist);
12677 gimple_bind_add_stmt (dep_bind, bind);
12678 gimple_bind_add_seq (dep_bind, tskred_olist);
12679 gimple_bind_add_seq (dep_bind, dep_olist);
12680 pop_gimplify_context (dep_bind);
12684 /* Lower the GIMPLE_OMP_TARGET in the current statement
12685 in GSI_P. CTX holds context information for the directive. */
12687 static void
12688 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12690 tree clauses;
12691 tree child_fn, t, c;
12692 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12693 gbind *tgt_bind, *bind, *dep_bind = NULL;
12694 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12695 location_t loc = gimple_location (stmt);
12696 bool offloaded, data_region;
12697 unsigned int map_cnt = 0;
12698 tree in_reduction_clauses = NULL_TREE;
12700 offloaded = is_gimple_omp_offloaded (stmt);
12701 switch (gimple_omp_target_kind (stmt))
12703 case GF_OMP_TARGET_KIND_REGION:
12704 tree *p, *q;
12705 q = &in_reduction_clauses;
12706 for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
12707 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12709 *q = *p;
12710 q = &OMP_CLAUSE_CHAIN (*q);
12711 *p = OMP_CLAUSE_CHAIN (*p);
12713 else
12714 p = &OMP_CLAUSE_CHAIN (*p);
12715 *q = NULL_TREE;
12716 *p = in_reduction_clauses;
12717 /* FALLTHRU */
12718 case GF_OMP_TARGET_KIND_UPDATE:
12719 case GF_OMP_TARGET_KIND_ENTER_DATA:
12720 case GF_OMP_TARGET_KIND_EXIT_DATA:
12721 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12722 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12723 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12724 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12725 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12726 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12727 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12728 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12729 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12730 data_region = false;
12731 break;
12732 case GF_OMP_TARGET_KIND_DATA:
12733 case GF_OMP_TARGET_KIND_OACC_DATA:
12734 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12735 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12736 data_region = true;
12737 break;
12738 default:
12739 gcc_unreachable ();
12742 /* Ensure that requires map is written via output_offload_tables, even if only
12743 'target (enter/exit) data' is used in the translation unit. */
12744 if (ENABLE_OFFLOADING && (omp_requires_mask & OMP_REQUIRES_TARGET_USED))
12745 g->have_offload = true;
12747 clauses = gimple_omp_target_clauses (stmt);
12749 gimple_seq dep_ilist = NULL;
12750 gimple_seq dep_olist = NULL;
12751 bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
12752 if (has_depend || in_reduction_clauses)
12754 push_gimplify_context ();
12755 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12756 if (has_depend)
12757 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12758 &dep_ilist, &dep_olist);
12759 if (in_reduction_clauses)
12760 lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
12761 ctx, NULL);
12764 tgt_bind = NULL;
12765 tgt_body = NULL;
12766 if (offloaded)
12768 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12769 tgt_body = gimple_bind_body (tgt_bind);
12771 else if (data_region)
12772 tgt_body = gimple_omp_body (stmt);
12773 child_fn = ctx->cb.dst_fn;
12775 push_gimplify_context ();
12776 fplist = NULL;
12778 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12779 switch (OMP_CLAUSE_CODE (c))
12781 tree var, x;
12783 default:
12784 break;
12785 case OMP_CLAUSE_MAP:
12786 #if CHECKING_P
12787 /* First check what we're prepared to handle in the following. */
12788 switch (OMP_CLAUSE_MAP_KIND (c))
12790 case GOMP_MAP_ALLOC:
12791 case GOMP_MAP_TO:
12792 case GOMP_MAP_FROM:
12793 case GOMP_MAP_TOFROM:
12794 case GOMP_MAP_POINTER:
12795 case GOMP_MAP_TO_PSET:
12796 case GOMP_MAP_DELETE:
12797 case GOMP_MAP_RELEASE:
12798 case GOMP_MAP_ALWAYS_TO:
12799 case GOMP_MAP_ALWAYS_FROM:
12800 case GOMP_MAP_ALWAYS_TOFROM:
12801 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12802 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12803 case GOMP_MAP_STRUCT:
12804 case GOMP_MAP_ALWAYS_POINTER:
12805 case GOMP_MAP_ATTACH:
12806 case GOMP_MAP_DETACH:
12807 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
12808 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
12809 break;
12810 case GOMP_MAP_IF_PRESENT:
12811 case GOMP_MAP_FORCE_ALLOC:
12812 case GOMP_MAP_FORCE_TO:
12813 case GOMP_MAP_FORCE_FROM:
12814 case GOMP_MAP_FORCE_TOFROM:
12815 case GOMP_MAP_FORCE_PRESENT:
12816 case GOMP_MAP_FORCE_DEVICEPTR:
12817 case GOMP_MAP_DEVICE_RESIDENT:
12818 case GOMP_MAP_LINK:
12819 case GOMP_MAP_FORCE_DETACH:
12820 gcc_assert (is_gimple_omp_oacc (stmt));
12821 break;
12822 default:
12823 gcc_unreachable ();
12825 #endif
12826 /* FALLTHRU */
12827 case OMP_CLAUSE_TO:
12828 case OMP_CLAUSE_FROM:
12829 oacc_firstprivate:
12830 var = OMP_CLAUSE_DECL (c);
12831 if (!DECL_P (var))
12833 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12834 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12835 && (OMP_CLAUSE_MAP_KIND (c)
12836 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12837 map_cnt++;
12838 continue;
12841 if (DECL_SIZE (var)
12842 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12844 tree var2 = DECL_VALUE_EXPR (var);
12845 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12846 var2 = TREE_OPERAND (var2, 0);
12847 gcc_assert (DECL_P (var2));
12848 var = var2;
12851 if (offloaded
12852 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12853 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12854 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12856 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12858 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12859 && varpool_node::get_create (var)->offloadable)
12860 continue;
12862 tree type = build_pointer_type (TREE_TYPE (var));
12863 tree new_var = lookup_decl (var, ctx);
12864 x = create_tmp_var_raw (type, get_name (new_var));
12865 gimple_add_tmp_var (x);
12866 x = build_simple_mem_ref (x);
12867 SET_DECL_VALUE_EXPR (new_var, x);
12868 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12870 continue;
12873 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12874 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12875 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12876 && is_omp_target (stmt))
12878 gcc_assert (maybe_lookup_field (c, ctx));
12879 map_cnt++;
12880 continue;
12883 if (!maybe_lookup_field (var, ctx))
12884 continue;
12886 /* Don't remap compute constructs' reduction variables, because the
12887 intermediate result must be local to each gang. */
12888 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12889 && is_gimple_omp_oacc (ctx->stmt)
12890 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12892 x = build_receiver_ref (var, true, ctx);
12893 tree new_var = lookup_decl (var, ctx);
12895 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12896 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12897 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12898 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12899 x = build_simple_mem_ref (x);
12900 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12902 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12903 if (omp_privatize_by_reference (new_var)
12904 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12905 || DECL_BY_REFERENCE (var)))
12907 /* Create a local object to hold the instance
12908 value. */
12909 tree type = TREE_TYPE (TREE_TYPE (new_var));
12910 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12911 tree inst = create_tmp_var (type, id);
12912 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12913 x = build_fold_addr_expr (inst);
12915 gimplify_assign (new_var, x, &fplist);
12917 else if (DECL_P (new_var))
12919 SET_DECL_VALUE_EXPR (new_var, x);
12920 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12922 else
12923 gcc_unreachable ();
12925 map_cnt++;
12926 break;
12928 case OMP_CLAUSE_FIRSTPRIVATE:
12929 omp_firstprivate_recv:
12930 gcc_checking_assert (offloaded);
12931 if (is_gimple_omp_oacc (ctx->stmt))
12933 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12934 gcc_checking_assert (!is_oacc_kernels (ctx));
12935 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12936 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12938 goto oacc_firstprivate;
12940 map_cnt++;
12941 var = OMP_CLAUSE_DECL (c);
12942 if (!omp_privatize_by_reference (var)
12943 && !is_gimple_reg_type (TREE_TYPE (var)))
12945 tree new_var = lookup_decl (var, ctx);
12946 if (is_variable_sized (var))
12948 tree pvar = DECL_VALUE_EXPR (var);
12949 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12950 pvar = TREE_OPERAND (pvar, 0);
12951 gcc_assert (DECL_P (pvar));
12952 tree new_pvar = lookup_decl (pvar, ctx);
12953 x = build_fold_indirect_ref (new_pvar);
12954 TREE_THIS_NOTRAP (x) = 1;
12956 else
12957 x = build_receiver_ref (var, true, ctx);
12958 SET_DECL_VALUE_EXPR (new_var, x);
12959 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12961 /* Fortran array descriptors: firstprivate of data + attach. */
12962 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
12963 && lang_hooks.decls.omp_array_data (var, true))
12964 map_cnt += 2;
12965 break;
12967 case OMP_CLAUSE_PRIVATE:
12968 gcc_checking_assert (offloaded);
12969 if (is_gimple_omp_oacc (ctx->stmt))
12971 /* No 'private' clauses on OpenACC 'kernels'. */
12972 gcc_checking_assert (!is_oacc_kernels (ctx));
12973 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12974 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12976 break;
12978 var = OMP_CLAUSE_DECL (c);
12979 if (is_variable_sized (var))
12981 tree new_var = lookup_decl (var, ctx);
12982 tree pvar = DECL_VALUE_EXPR (var);
12983 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12984 pvar = TREE_OPERAND (pvar, 0);
12985 gcc_assert (DECL_P (pvar));
12986 tree new_pvar = lookup_decl (pvar, ctx);
12987 x = build_fold_indirect_ref (new_pvar);
12988 TREE_THIS_NOTRAP (x) = 1;
12989 SET_DECL_VALUE_EXPR (new_var, x);
12990 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12992 break;
12994 case OMP_CLAUSE_USE_DEVICE_PTR:
12995 case OMP_CLAUSE_USE_DEVICE_ADDR:
12996 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12997 case OMP_CLAUSE_IS_DEVICE_PTR:
12998 var = OMP_CLAUSE_DECL (c);
12999 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13001 while (TREE_CODE (var) == INDIRECT_REF
13002 || TREE_CODE (var) == ARRAY_REF)
13003 var = TREE_OPERAND (var, 0);
13004 if (lang_hooks.decls.omp_array_data (var, true))
13005 goto omp_firstprivate_recv;
13007 map_cnt++;
13008 if (is_variable_sized (var))
13010 tree new_var = lookup_decl (var, ctx);
13011 tree pvar = DECL_VALUE_EXPR (var);
13012 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13013 pvar = TREE_OPERAND (pvar, 0);
13014 gcc_assert (DECL_P (pvar));
13015 tree new_pvar = lookup_decl (pvar, ctx);
13016 x = build_fold_indirect_ref (new_pvar);
13017 TREE_THIS_NOTRAP (x) = 1;
13018 SET_DECL_VALUE_EXPR (new_var, x);
13019 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
13021 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13022 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13023 && !omp_privatize_by_reference (var)
13024 && !omp_is_allocatable_or_ptr (var)
13025 && !lang_hooks.decls.omp_array_data (var, true))
13026 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13028 tree new_var = lookup_decl (var, ctx);
13029 tree type = build_pointer_type (TREE_TYPE (var));
13030 x = create_tmp_var_raw (type, get_name (new_var));
13031 gimple_add_tmp_var (x);
13032 x = build_simple_mem_ref (x);
13033 SET_DECL_VALUE_EXPR (new_var, x);
13034 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
13036 else
13038 tree new_var = lookup_decl (var, ctx);
13039 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
13040 gimple_add_tmp_var (x);
13041 SET_DECL_VALUE_EXPR (new_var, x);
13042 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
13044 break;
13047 if (offloaded)
13049 target_nesting_level++;
13050 lower_omp (&tgt_body, ctx);
13051 target_nesting_level--;
13053 else if (data_region)
13054 lower_omp (&tgt_body, ctx);
13056 if (offloaded)
13058 /* Declare all the variables created by mapping and the variables
13059 declared in the scope of the target body. */
13060 record_vars_into (ctx->block_vars, child_fn);
13061 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
13062 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
13065 olist = NULL;
13066 ilist = NULL;
13067 if (ctx->record_type)
13069 ctx->sender_decl
13070 = create_tmp_var (ctx->record_type, ".omp_data_arr");
13071 DECL_NAMELESS (ctx->sender_decl) = 1;
13072 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
13073 t = make_tree_vec (3);
13074 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
13075 TREE_VEC_ELT (t, 1)
13076 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
13077 ".omp_data_sizes");
13078 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
13079 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
13080 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
13081 tree tkind_type = short_unsigned_type_node;
13082 int talign_shift = 8;
13083 TREE_VEC_ELT (t, 2)
13084 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
13085 ".omp_data_kinds");
13086 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
13087 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
13088 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
13089 gimple_omp_target_set_data_arg (stmt, t);
13091 vec<constructor_elt, va_gc> *vsize;
13092 vec<constructor_elt, va_gc> *vkind;
13093 vec_alloc (vsize, map_cnt);
13094 vec_alloc (vkind, map_cnt);
13095 unsigned int map_idx = 0;
13097 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13098 switch (OMP_CLAUSE_CODE (c))
13100 tree ovar, nc, s, purpose, var, x, type;
13101 unsigned int talign;
13103 default:
13104 break;
13106 case OMP_CLAUSE_MAP:
13107 case OMP_CLAUSE_TO:
13108 case OMP_CLAUSE_FROM:
13109 oacc_firstprivate_map:
13110 nc = c;
13111 ovar = OMP_CLAUSE_DECL (c);
13112 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13113 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13114 || (OMP_CLAUSE_MAP_KIND (c)
13115 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13116 break;
13117 if (!DECL_P (ovar))
13119 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13120 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
13122 nc = OMP_CLAUSE_CHAIN (c);
13123 gcc_checking_assert (OMP_CLAUSE_DECL (nc)
13124 == get_base_address (ovar));
13125 ovar = OMP_CLAUSE_DECL (nc);
13127 else
13129 tree x = build_sender_ref (ovar, ctx);
13130 tree v = ovar;
13131 if (in_reduction_clauses
13132 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13133 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13135 v = unshare_expr (v);
13136 tree *p = &v;
13137 while (handled_component_p (*p)
13138 || TREE_CODE (*p) == INDIRECT_REF
13139 || TREE_CODE (*p) == ADDR_EXPR
13140 || TREE_CODE (*p) == MEM_REF
13141 || TREE_CODE (*p) == NON_LVALUE_EXPR)
13142 p = &TREE_OPERAND (*p, 0);
13143 tree d = *p;
13144 if (is_variable_sized (d))
13146 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13147 d = DECL_VALUE_EXPR (d);
13148 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13149 d = TREE_OPERAND (d, 0);
13150 gcc_assert (DECL_P (d));
13152 splay_tree_key key
13153 = (splay_tree_key) &DECL_CONTEXT (d);
13154 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13155 key)->value;
13156 if (d == *p)
13157 *p = nd;
13158 else
13159 *p = build_fold_indirect_ref (nd);
13161 v = build_fold_addr_expr_with_type (v, ptr_type_node);
13162 gimplify_assign (x, v, &ilist);
13163 nc = NULL_TREE;
13166 else
13168 if (DECL_SIZE (ovar)
13169 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
13171 tree ovar2 = DECL_VALUE_EXPR (ovar);
13172 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
13173 ovar2 = TREE_OPERAND (ovar2, 0);
13174 gcc_assert (DECL_P (ovar2));
13175 ovar = ovar2;
13177 if (!maybe_lookup_field (ovar, ctx)
13178 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13179 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13180 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
13181 continue;
13184 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
13185 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
13186 talign = DECL_ALIGN_UNIT (ovar);
13188 var = NULL_TREE;
13189 if (nc)
13191 if (in_reduction_clauses
13192 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13193 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13195 tree d = ovar;
13196 if (is_variable_sized (d))
13198 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13199 d = DECL_VALUE_EXPR (d);
13200 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13201 d = TREE_OPERAND (d, 0);
13202 gcc_assert (DECL_P (d));
13204 splay_tree_key key
13205 = (splay_tree_key) &DECL_CONTEXT (d);
13206 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13207 key)->value;
13208 if (d == ovar)
13209 var = nd;
13210 else
13211 var = build_fold_indirect_ref (nd);
13213 else
13214 var = lookup_decl_in_outer_ctx (ovar, ctx);
13216 if (nc
13217 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13218 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13219 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
13220 && is_omp_target (stmt))
13222 x = build_sender_ref (c, ctx);
13223 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
13225 else if (nc)
13227 x = build_sender_ref (ovar, ctx);
13229 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13230 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
13231 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
13232 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
13234 gcc_assert (offloaded);
13235 tree avar
13236 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
13237 mark_addressable (avar);
13238 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
13239 talign = DECL_ALIGN_UNIT (avar);
13240 avar = build_fold_addr_expr (avar);
13241 gimplify_assign (x, avar, &ilist);
13243 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13245 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
13246 if (!omp_privatize_by_reference (var))
13248 if (is_gimple_reg (var)
13249 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13250 suppress_warning (var);
13251 var = build_fold_addr_expr (var);
13253 else
13254 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13255 gimplify_assign (x, var, &ilist);
13257 else if (is_gimple_reg (var))
13259 gcc_assert (offloaded);
13260 tree avar = create_tmp_var (TREE_TYPE (var));
13261 mark_addressable (avar);
13262 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
13263 if (GOMP_MAP_COPY_TO_P (map_kind)
13264 || map_kind == GOMP_MAP_POINTER
13265 || map_kind == GOMP_MAP_TO_PSET
13266 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13268 /* If we need to initialize a temporary
13269 with VAR because it is not addressable, and
13270 the variable hasn't been initialized yet, then
13271 we'll get a warning for the store to avar.
13272 Don't warn in that case, the mapping might
13273 be implicit. */
13274 suppress_warning (var, OPT_Wuninitialized);
13275 gimplify_assign (avar, var, &ilist);
13277 avar = build_fold_addr_expr (avar);
13278 gimplify_assign (x, avar, &ilist);
13279 if ((GOMP_MAP_COPY_FROM_P (map_kind)
13280 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13281 && !TYPE_READONLY (TREE_TYPE (var)))
13283 x = unshare_expr (x);
13284 x = build_simple_mem_ref (x);
13285 gimplify_assign (var, x, &olist);
13288 else
13290 /* While MAP is handled explicitly by the FE,
13291 for 'target update', only the identified is passed. */
13292 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
13293 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
13294 && (omp_is_allocatable_or_ptr (var)
13295 && omp_check_optional_argument (var, false)))
13296 var = build_fold_indirect_ref (var);
13297 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
13298 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
13299 || (!omp_is_allocatable_or_ptr (var)
13300 && !omp_check_optional_argument (var, false)))
13301 var = build_fold_addr_expr (var);
13302 gimplify_assign (x, var, &ilist);
13305 s = NULL_TREE;
13306 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13308 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13309 s = TREE_TYPE (ovar);
13310 if (TREE_CODE (s) == REFERENCE_TYPE
13311 || omp_check_optional_argument (ovar, false))
13312 s = TREE_TYPE (s);
13313 s = TYPE_SIZE_UNIT (s);
13315 else
13316 s = OMP_CLAUSE_SIZE (c);
13317 if (s == NULL_TREE)
13318 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13319 s = fold_convert (size_type_node, s);
13320 purpose = size_int (map_idx++);
13321 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13322 if (TREE_CODE (s) != INTEGER_CST)
13323 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13325 unsigned HOST_WIDE_INT tkind, tkind_zero;
13326 switch (OMP_CLAUSE_CODE (c))
13328 case OMP_CLAUSE_MAP:
13329 tkind = OMP_CLAUSE_MAP_KIND (c);
13330 tkind_zero = tkind;
13331 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
13332 switch (tkind)
13334 case GOMP_MAP_ALLOC:
13335 case GOMP_MAP_IF_PRESENT:
13336 case GOMP_MAP_TO:
13337 case GOMP_MAP_FROM:
13338 case GOMP_MAP_TOFROM:
13339 case GOMP_MAP_ALWAYS_TO:
13340 case GOMP_MAP_ALWAYS_FROM:
13341 case GOMP_MAP_ALWAYS_TOFROM:
13342 case GOMP_MAP_RELEASE:
13343 case GOMP_MAP_FORCE_TO:
13344 case GOMP_MAP_FORCE_FROM:
13345 case GOMP_MAP_FORCE_TOFROM:
13346 case GOMP_MAP_FORCE_PRESENT:
13347 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
13348 break;
13349 case GOMP_MAP_DELETE:
13350 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
13351 default:
13352 break;
13354 if (tkind_zero != tkind)
13356 if (integer_zerop (s))
13357 tkind = tkind_zero;
13358 else if (integer_nonzerop (s))
13359 tkind_zero = tkind;
13361 if (tkind_zero == tkind
13362 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c)
13363 && (((tkind & GOMP_MAP_FLAG_SPECIAL_BITS)
13364 & ~GOMP_MAP_IMPLICIT)
13365 == 0))
13367 /* If this is an implicit map, and the GOMP_MAP_IMPLICIT
13368 bits are not interfered by other special bit encodings,
13369 then turn the GOMP_IMPLICIT_BIT flag on for the runtime
13370 to see. */
13371 tkind |= GOMP_MAP_IMPLICIT;
13372 tkind_zero = tkind;
13374 break;
13375 case OMP_CLAUSE_FIRSTPRIVATE:
13376 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13377 tkind = GOMP_MAP_TO;
13378 tkind_zero = tkind;
13379 break;
13380 case OMP_CLAUSE_TO:
13381 tkind = GOMP_MAP_TO;
13382 tkind_zero = tkind;
13383 break;
13384 case OMP_CLAUSE_FROM:
13385 tkind = GOMP_MAP_FROM;
13386 tkind_zero = tkind;
13387 break;
13388 default:
13389 gcc_unreachable ();
13391 gcc_checking_assert (tkind
13392 < (HOST_WIDE_INT_C (1U) << talign_shift));
13393 gcc_checking_assert (tkind_zero
13394 < (HOST_WIDE_INT_C (1U) << talign_shift));
13395 talign = ceil_log2 (talign);
13396 tkind |= talign << talign_shift;
13397 tkind_zero |= talign << talign_shift;
13398 gcc_checking_assert (tkind
13399 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13400 gcc_checking_assert (tkind_zero
13401 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13402 if (tkind == tkind_zero)
13403 x = build_int_cstu (tkind_type, tkind);
13404 else
13406 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
13407 x = build3 (COND_EXPR, tkind_type,
13408 fold_build2 (EQ_EXPR, boolean_type_node,
13409 unshare_expr (s), size_zero_node),
13410 build_int_cstu (tkind_type, tkind_zero),
13411 build_int_cstu (tkind_type, tkind));
13413 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
13414 if (nc && nc != c)
13415 c = nc;
13416 break;
13418 case OMP_CLAUSE_FIRSTPRIVATE:
13419 omp_has_device_addr_descr:
13420 if (is_gimple_omp_oacc (ctx->stmt))
13421 goto oacc_firstprivate_map;
13422 ovar = OMP_CLAUSE_DECL (c);
13423 if (omp_privatize_by_reference (ovar))
13424 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13425 else
13426 talign = DECL_ALIGN_UNIT (ovar);
13427 var = lookup_decl_in_outer_ctx (ovar, ctx);
13428 x = build_sender_ref (ovar, ctx);
13429 tkind = GOMP_MAP_FIRSTPRIVATE;
13430 type = TREE_TYPE (ovar);
13431 if (omp_privatize_by_reference (ovar))
13432 type = TREE_TYPE (type);
13433 if ((INTEGRAL_TYPE_P (type)
13434 && TYPE_PRECISION (type) <= POINTER_SIZE)
13435 || TREE_CODE (type) == POINTER_TYPE)
13437 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13438 tree t = var;
13439 if (omp_privatize_by_reference (var))
13440 t = build_simple_mem_ref (var);
13441 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13442 suppress_warning (var);
13443 if (TREE_CODE (type) != POINTER_TYPE)
13444 t = fold_convert (pointer_sized_int_node, t);
13445 t = fold_convert (TREE_TYPE (x), t);
13446 gimplify_assign (x, t, &ilist);
13448 else if (omp_privatize_by_reference (var))
13449 gimplify_assign (x, var, &ilist);
13450 else if (is_gimple_reg (var))
13452 tree avar = create_tmp_var (TREE_TYPE (var));
13453 mark_addressable (avar);
13454 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13455 suppress_warning (var);
13456 gimplify_assign (avar, var, &ilist);
13457 avar = build_fold_addr_expr (avar);
13458 gimplify_assign (x, avar, &ilist);
13460 else
13462 var = build_fold_addr_expr (var);
13463 gimplify_assign (x, var, &ilist);
13465 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13466 s = size_int (0);
13467 else if (omp_privatize_by_reference (ovar))
13468 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13469 else
13470 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13471 s = fold_convert (size_type_node, s);
13472 purpose = size_int (map_idx++);
13473 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13474 if (TREE_CODE (s) != INTEGER_CST)
13475 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13477 gcc_checking_assert (tkind
13478 < (HOST_WIDE_INT_C (1U) << talign_shift));
13479 talign = ceil_log2 (talign);
13480 tkind |= talign << talign_shift;
13481 gcc_checking_assert (tkind
13482 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13483 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13484 build_int_cstu (tkind_type, tkind));
13485 /* Fortran array descriptors: firstprivate of data + attach. */
13486 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13487 && lang_hooks.decls.omp_array_data (ovar, true))
13489 tree not_null_lb, null_lb, after_lb;
13490 tree var1, var2, size1, size2;
13491 tree present = omp_check_optional_argument (ovar, true);
13492 if (present)
13494 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13495 not_null_lb = create_artificial_label (clause_loc);
13496 null_lb = create_artificial_label (clause_loc);
13497 after_lb = create_artificial_label (clause_loc);
13498 gimple_seq seq = NULL;
13499 present = force_gimple_operand (present, &seq, true,
13500 NULL_TREE);
13501 gimple_seq_add_seq (&ilist, seq);
13502 gimple_seq_add_stmt (&ilist,
13503 gimple_build_cond_from_tree (present,
13504 not_null_lb, null_lb));
13505 gimple_seq_add_stmt (&ilist,
13506 gimple_build_label (not_null_lb));
13508 var1 = lang_hooks.decls.omp_array_data (var, false);
13509 size1 = lang_hooks.decls.omp_array_size (var, &ilist);
13510 var2 = build_fold_addr_expr (x);
13511 if (!POINTER_TYPE_P (TREE_TYPE (var)))
13512 var = build_fold_addr_expr (var);
13513 size2 = fold_build2 (POINTER_DIFF_EXPR, ssizetype,
13514 build_fold_addr_expr (var1), var);
13515 size2 = fold_convert (sizetype, size2);
13516 if (present)
13518 tree tmp = create_tmp_var (TREE_TYPE (var1));
13519 gimplify_assign (tmp, var1, &ilist);
13520 var1 = tmp;
13521 tmp = create_tmp_var (TREE_TYPE (var2));
13522 gimplify_assign (tmp, var2, &ilist);
13523 var2 = tmp;
13524 tmp = create_tmp_var (TREE_TYPE (size1));
13525 gimplify_assign (tmp, size1, &ilist);
13526 size1 = tmp;
13527 tmp = create_tmp_var (TREE_TYPE (size2));
13528 gimplify_assign (tmp, size2, &ilist);
13529 size2 = tmp;
13530 gimple_seq_add_stmt (&ilist, gimple_build_goto (after_lb));
13531 gimple_seq_add_stmt (&ilist, gimple_build_label (null_lb));
13532 gimplify_assign (var1, null_pointer_node, &ilist);
13533 gimplify_assign (var2, null_pointer_node, &ilist);
13534 gimplify_assign (size1, size_zero_node, &ilist);
13535 gimplify_assign (size2, size_zero_node, &ilist);
13536 gimple_seq_add_stmt (&ilist, gimple_build_label (after_lb));
13538 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13539 gimplify_assign (x, var1, &ilist);
13540 tkind = GOMP_MAP_FIRSTPRIVATE;
13541 talign = DECL_ALIGN_UNIT (ovar);
13542 talign = ceil_log2 (talign);
13543 tkind |= talign << talign_shift;
13544 gcc_checking_assert (tkind
13545 <= tree_to_uhwi (
13546 TYPE_MAX_VALUE (tkind_type)));
13547 purpose = size_int (map_idx++);
13548 CONSTRUCTOR_APPEND_ELT (vsize, purpose, size1);
13549 if (TREE_CODE (size1) != INTEGER_CST)
13550 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13551 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13552 build_int_cstu (tkind_type, tkind));
13553 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13554 gimplify_assign (x, var2, &ilist);
13555 tkind = GOMP_MAP_ATTACH;
13556 purpose = size_int (map_idx++);
13557 CONSTRUCTOR_APPEND_ELT (vsize, purpose, size2);
13558 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13559 build_int_cstu (tkind_type, tkind));
13561 break;
13563 case OMP_CLAUSE_USE_DEVICE_PTR:
13564 case OMP_CLAUSE_USE_DEVICE_ADDR:
13565 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13566 case OMP_CLAUSE_IS_DEVICE_PTR:
13567 ovar = OMP_CLAUSE_DECL (c);
13568 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13570 if (lang_hooks.decls.omp_array_data (ovar, true))
13571 goto omp_has_device_addr_descr;
13572 while (TREE_CODE (ovar) == INDIRECT_REF
13573 || TREE_CODE (ovar) == ARRAY_REF)
13574 ovar = TREE_OPERAND (ovar, 0);
13576 var = lookup_decl_in_outer_ctx (ovar, ctx);
13578 if (lang_hooks.decls.omp_array_data (ovar, true))
13580 tkind = ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13581 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13582 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13583 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13585 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13586 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13588 tkind = GOMP_MAP_USE_DEVICE_PTR;
13589 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13591 else
13593 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13594 x = build_sender_ref (ovar, ctx);
13597 if (is_gimple_omp_oacc (ctx->stmt))
13599 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13601 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13602 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13605 type = TREE_TYPE (ovar);
13606 if (lang_hooks.decls.omp_array_data (ovar, true))
13607 var = lang_hooks.decls.omp_array_data (var, false);
13608 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13609 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13610 && !omp_privatize_by_reference (ovar)
13611 && !omp_is_allocatable_or_ptr (ovar))
13612 || TREE_CODE (type) == ARRAY_TYPE)
13613 var = build_fold_addr_expr (var);
13614 else
13616 if (omp_privatize_by_reference (ovar)
13617 || omp_check_optional_argument (ovar, false)
13618 || omp_is_allocatable_or_ptr (ovar))
13620 type = TREE_TYPE (type);
13621 if (POINTER_TYPE_P (type)
13622 && TREE_CODE (type) != ARRAY_TYPE
13623 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13624 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13625 && !omp_is_allocatable_or_ptr (ovar))
13626 || (omp_privatize_by_reference (ovar)
13627 && omp_is_allocatable_or_ptr (ovar))))
13628 var = build_simple_mem_ref (var);
13629 var = fold_convert (TREE_TYPE (x), var);
13632 tree present;
13633 present = omp_check_optional_argument (ovar, true);
13634 if (present)
13636 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13637 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13638 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13639 tree new_x = unshare_expr (x);
13640 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13641 fb_rvalue);
13642 gcond *cond = gimple_build_cond_from_tree (present,
13643 notnull_label,
13644 null_label);
13645 gimple_seq_add_stmt (&ilist, cond);
13646 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
13647 gimplify_assign (new_x, null_pointer_node, &ilist);
13648 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
13649 gimple_seq_add_stmt (&ilist,
13650 gimple_build_label (notnull_label));
13651 gimplify_assign (x, var, &ilist);
13652 gimple_seq_add_stmt (&ilist,
13653 gimple_build_label (opt_arg_label));
13655 else
13656 gimplify_assign (x, var, &ilist);
13657 s = size_int (0);
13658 purpose = size_int (map_idx++);
13659 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13660 gcc_checking_assert (tkind
13661 < (HOST_WIDE_INT_C (1U) << talign_shift));
13662 gcc_checking_assert (tkind
13663 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13664 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13665 build_int_cstu (tkind_type, tkind));
13666 break;
13669 gcc_assert (map_idx == map_cnt);
13671 DECL_INITIAL (TREE_VEC_ELT (t, 1))
13672 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13673 DECL_INITIAL (TREE_VEC_ELT (t, 2))
13674 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
13675 for (int i = 1; i <= 2; i++)
13676 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
13678 gimple_seq initlist = NULL;
13679 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
13680 TREE_VEC_ELT (t, i)),
13681 &initlist, true, NULL_TREE);
13682 gimple_seq_add_seq (&ilist, initlist);
13684 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
13685 gimple_seq_add_stmt (&olist,
13686 gimple_build_assign (TREE_VEC_ELT (t, i),
13687 clobber));
13689 else if (omp_maybe_offloaded_ctx (ctx->outer))
13691 tree id = get_identifier ("omp declare target");
13692 tree decl = TREE_VEC_ELT (t, i);
13693 DECL_ATTRIBUTES (decl)
13694 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13695 varpool_node *node = varpool_node::get (decl);
13696 if (node)
13698 node->offloadable = 1;
13699 if (ENABLE_OFFLOADING)
13701 g->have_offload = true;
13702 vec_safe_push (offload_vars, t);
13707 tree clobber = build_clobber (ctx->record_type);
13708 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13709 clobber));
13712 /* Once all the expansions are done, sequence all the different
13713 fragments inside gimple_omp_body. */
13715 new_body = NULL;
13717 if (offloaded
13718 && ctx->record_type)
13720 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13721 /* fixup_child_record_type might have changed receiver_decl's type. */
13722 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13723 gimple_seq_add_stmt (&new_body,
13724 gimple_build_assign (ctx->receiver_decl, t));
13726 gimple_seq_add_seq (&new_body, fplist);
13728 if (offloaded || data_region)
13730 tree prev = NULL_TREE;
13731 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13732 switch (OMP_CLAUSE_CODE (c))
13734 tree var, x;
13735 default:
13736 break;
13737 case OMP_CLAUSE_FIRSTPRIVATE:
13738 omp_firstprivatize_data_region:
13739 if (is_gimple_omp_oacc (ctx->stmt))
13740 break;
13741 var = OMP_CLAUSE_DECL (c);
13742 if (omp_privatize_by_reference (var)
13743 || is_gimple_reg_type (TREE_TYPE (var)))
13745 tree new_var = lookup_decl (var, ctx);
13746 tree type;
13747 type = TREE_TYPE (var);
13748 if (omp_privatize_by_reference (var))
13749 type = TREE_TYPE (type);
13750 if ((INTEGRAL_TYPE_P (type)
13751 && TYPE_PRECISION (type) <= POINTER_SIZE)
13752 || TREE_CODE (type) == POINTER_TYPE)
13754 x = build_receiver_ref (var, false, ctx);
13755 if (TREE_CODE (type) != POINTER_TYPE)
13756 x = fold_convert (pointer_sized_int_node, x);
13757 x = fold_convert (type, x);
13758 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13759 fb_rvalue);
13760 if (omp_privatize_by_reference (var))
13762 tree v = create_tmp_var_raw (type, get_name (var));
13763 gimple_add_tmp_var (v);
13764 TREE_ADDRESSABLE (v) = 1;
13765 gimple_seq_add_stmt (&new_body,
13766 gimple_build_assign (v, x));
13767 x = build_fold_addr_expr (v);
13769 gimple_seq_add_stmt (&new_body,
13770 gimple_build_assign (new_var, x));
13772 else
13774 bool by_ref = !omp_privatize_by_reference (var);
13775 x = build_receiver_ref (var, by_ref, ctx);
13776 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13777 fb_rvalue);
13778 gimple_seq_add_stmt (&new_body,
13779 gimple_build_assign (new_var, x));
13782 else if (is_variable_sized (var))
13784 tree pvar = DECL_VALUE_EXPR (var);
13785 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13786 pvar = TREE_OPERAND (pvar, 0);
13787 gcc_assert (DECL_P (pvar));
13788 tree new_var = lookup_decl (pvar, ctx);
13789 x = build_receiver_ref (var, false, ctx);
13790 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13791 gimple_seq_add_stmt (&new_body,
13792 gimple_build_assign (new_var, x));
13794 break;
13795 case OMP_CLAUSE_PRIVATE:
13796 if (is_gimple_omp_oacc (ctx->stmt))
13797 break;
13798 var = OMP_CLAUSE_DECL (c);
13799 if (omp_privatize_by_reference (var))
13801 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13802 tree new_var = lookup_decl (var, ctx);
13803 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13804 if (TREE_CONSTANT (x))
13806 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13807 get_name (var));
13808 gimple_add_tmp_var (x);
13809 TREE_ADDRESSABLE (x) = 1;
13810 x = build_fold_addr_expr_loc (clause_loc, x);
13812 else
13813 break;
13815 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13816 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13817 gimple_seq_add_stmt (&new_body,
13818 gimple_build_assign (new_var, x));
13820 break;
13821 case OMP_CLAUSE_USE_DEVICE_PTR:
13822 case OMP_CLAUSE_USE_DEVICE_ADDR:
13823 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13824 case OMP_CLAUSE_IS_DEVICE_PTR:
13825 tree new_var;
13826 gimple_seq assign_body;
13827 bool is_array_data;
13828 bool do_optional_check;
13829 assign_body = NULL;
13830 do_optional_check = false;
13831 var = OMP_CLAUSE_DECL (c);
13832 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13833 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR && is_array_data)
13834 goto omp_firstprivatize_data_region;
13836 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13837 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13838 x = build_sender_ref (is_array_data
13839 ? (splay_tree_key) &DECL_NAME (var)
13840 : (splay_tree_key) &DECL_UID (var), ctx);
13841 else
13843 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13845 while (TREE_CODE (var) == INDIRECT_REF
13846 || TREE_CODE (var) == ARRAY_REF)
13847 var = TREE_OPERAND (var, 0);
13849 x = build_receiver_ref (var, false, ctx);
13852 if (is_array_data)
13854 bool is_ref = omp_privatize_by_reference (var);
13855 do_optional_check = true;
13856 /* First, we copy the descriptor data from the host; then
13857 we update its data to point to the target address. */
13858 new_var = lookup_decl (var, ctx);
13859 new_var = DECL_VALUE_EXPR (new_var);
13860 tree v = new_var;
13861 tree v2 = var;
13862 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR
13863 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR)
13864 v2 = maybe_lookup_decl_in_outer_ctx (var, ctx);
13866 if (is_ref)
13868 v2 = build_fold_indirect_ref (v2);
13869 v = create_tmp_var_raw (TREE_TYPE (v2), get_name (var));
13870 gimple_add_tmp_var (v);
13871 TREE_ADDRESSABLE (v) = 1;
13872 gimplify_assign (v, v2, &assign_body);
13873 tree rhs = build_fold_addr_expr (v);
13874 gimple_seq_add_stmt (&assign_body,
13875 gimple_build_assign (new_var, rhs));
13877 else
13878 gimplify_assign (new_var, v2, &assign_body);
13880 v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13881 gcc_assert (v2);
13882 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13883 gimple_seq_add_stmt (&assign_body,
13884 gimple_build_assign (v2, x));
13886 else if (is_variable_sized (var))
13888 tree pvar = DECL_VALUE_EXPR (var);
13889 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13890 pvar = TREE_OPERAND (pvar, 0);
13891 gcc_assert (DECL_P (pvar));
13892 new_var = lookup_decl (pvar, ctx);
13893 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13894 gimple_seq_add_stmt (&assign_body,
13895 gimple_build_assign (new_var, x));
13897 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13898 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13899 && !omp_privatize_by_reference (var)
13900 && !omp_is_allocatable_or_ptr (var))
13901 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13903 new_var = lookup_decl (var, ctx);
13904 new_var = DECL_VALUE_EXPR (new_var);
13905 gcc_assert (TREE_CODE (new_var) == MEM_REF);
13906 new_var = TREE_OPERAND (new_var, 0);
13907 gcc_assert (DECL_P (new_var));
13908 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13909 gimple_seq_add_stmt (&assign_body,
13910 gimple_build_assign (new_var, x));
13912 else
13914 tree type = TREE_TYPE (var);
13915 new_var = lookup_decl (var, ctx);
13916 if (omp_privatize_by_reference (var))
13918 type = TREE_TYPE (type);
13919 if (POINTER_TYPE_P (type)
13920 && TREE_CODE (type) != ARRAY_TYPE
13921 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13922 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13923 || (omp_privatize_by_reference (var)
13924 && omp_is_allocatable_or_ptr (var))))
13926 tree v = create_tmp_var_raw (type, get_name (var));
13927 gimple_add_tmp_var (v);
13928 TREE_ADDRESSABLE (v) = 1;
13929 x = fold_convert (type, x);
13930 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13931 fb_rvalue);
13932 gimple_seq_add_stmt (&assign_body,
13933 gimple_build_assign (v, x));
13934 x = build_fold_addr_expr (v);
13935 do_optional_check = true;
13938 new_var = DECL_VALUE_EXPR (new_var);
13939 x = fold_convert (TREE_TYPE (new_var), x);
13940 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13941 gimple_seq_add_stmt (&assign_body,
13942 gimple_build_assign (new_var, x));
13944 tree present;
13945 present = ((do_optional_check
13946 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13947 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13948 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
13949 : NULL_TREE);
13950 if (present)
13952 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13953 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13954 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13955 glabel *null_glabel = gimple_build_label (null_label);
13956 glabel *notnull_glabel = gimple_build_label (notnull_label);
13957 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
13958 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13959 fb_rvalue);
13960 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13961 fb_rvalue);
13962 gcond *cond = gimple_build_cond_from_tree (present,
13963 notnull_label,
13964 null_label);
13965 gimple_seq_add_stmt (&new_body, cond);
13966 gimple_seq_add_stmt (&new_body, null_glabel);
13967 gimplify_assign (new_var, null_pointer_node, &new_body);
13968 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13969 gimple_seq_add_stmt (&new_body, notnull_glabel);
13970 gimple_seq_add_seq (&new_body, assign_body);
13971 gimple_seq_add_stmt (&new_body,
13972 gimple_build_label (opt_arg_label));
13974 else
13975 gimple_seq_add_seq (&new_body, assign_body);
13976 break;
13978 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13979 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13980 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13981 or references to VLAs. */
13982 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13983 switch (OMP_CLAUSE_CODE (c))
13985 tree var;
13986 default:
13987 break;
13988 case OMP_CLAUSE_MAP:
13989 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13990 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13992 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13993 poly_int64 offset = 0;
13994 gcc_assert (prev);
13995 var = OMP_CLAUSE_DECL (c);
13996 if (DECL_P (var)
13997 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
13998 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
13999 ctx))
14000 && varpool_node::get_create (var)->offloadable)
14001 break;
14002 if (TREE_CODE (var) == INDIRECT_REF
14003 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
14004 var = TREE_OPERAND (var, 0);
14005 if (TREE_CODE (var) == COMPONENT_REF)
14007 var = get_addr_base_and_unit_offset (var, &offset);
14008 gcc_assert (var != NULL_TREE && DECL_P (var));
14010 else if (DECL_SIZE (var)
14011 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
14013 tree var2 = DECL_VALUE_EXPR (var);
14014 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
14015 var2 = TREE_OPERAND (var2, 0);
14016 gcc_assert (DECL_P (var2));
14017 var = var2;
14019 tree new_var = lookup_decl (var, ctx), x;
14020 tree type = TREE_TYPE (new_var);
14021 bool is_ref;
14022 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
14023 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
14024 == COMPONENT_REF))
14026 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
14027 is_ref = true;
14028 new_var = build2 (MEM_REF, type,
14029 build_fold_addr_expr (new_var),
14030 build_int_cst (build_pointer_type (type),
14031 offset));
14033 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
14035 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
14036 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
14037 new_var = build2 (MEM_REF, type,
14038 build_fold_addr_expr (new_var),
14039 build_int_cst (build_pointer_type (type),
14040 offset));
14042 else
14043 is_ref = omp_privatize_by_reference (var);
14044 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
14045 is_ref = false;
14046 bool ref_to_array = false;
14047 bool ref_to_ptr = false;
14048 if (is_ref)
14050 type = TREE_TYPE (type);
14051 if (TREE_CODE (type) == ARRAY_TYPE)
14053 type = build_pointer_type (type);
14054 ref_to_array = true;
14057 else if (TREE_CODE (type) == ARRAY_TYPE)
14059 tree decl2 = DECL_VALUE_EXPR (new_var);
14060 gcc_assert (TREE_CODE (decl2) == MEM_REF);
14061 decl2 = TREE_OPERAND (decl2, 0);
14062 gcc_assert (DECL_P (decl2));
14063 new_var = decl2;
14064 type = TREE_TYPE (new_var);
14066 else if (TREE_CODE (type) == REFERENCE_TYPE
14067 && TREE_CODE (TREE_TYPE (type)) == POINTER_TYPE)
14069 type = TREE_TYPE (type);
14070 ref_to_ptr = true;
14072 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
14073 x = fold_convert_loc (clause_loc, type, x);
14074 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
14076 tree bias = OMP_CLAUSE_SIZE (c);
14077 if (DECL_P (bias))
14078 bias = lookup_decl (bias, ctx);
14079 bias = fold_convert_loc (clause_loc, sizetype, bias);
14080 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
14081 bias);
14082 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
14083 TREE_TYPE (x), x, bias);
14085 if (ref_to_array)
14086 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
14087 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14088 if ((is_ref && !ref_to_array)
14089 || ref_to_ptr)
14091 tree t = create_tmp_var_raw (type, get_name (var));
14092 gimple_add_tmp_var (t);
14093 TREE_ADDRESSABLE (t) = 1;
14094 gimple_seq_add_stmt (&new_body,
14095 gimple_build_assign (t, x));
14096 x = build_fold_addr_expr_loc (clause_loc, t);
14098 gimple_seq_add_stmt (&new_body,
14099 gimple_build_assign (new_var, x));
14100 prev = NULL_TREE;
14102 else if (OMP_CLAUSE_CHAIN (c)
14103 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
14104 == OMP_CLAUSE_MAP
14105 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
14106 == GOMP_MAP_FIRSTPRIVATE_POINTER
14107 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
14108 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
14109 prev = c;
14110 break;
14111 case OMP_CLAUSE_PRIVATE:
14112 var = OMP_CLAUSE_DECL (c);
14113 if (is_variable_sized (var))
14115 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14116 tree new_var = lookup_decl (var, ctx);
14117 tree pvar = DECL_VALUE_EXPR (var);
14118 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
14119 pvar = TREE_OPERAND (pvar, 0);
14120 gcc_assert (DECL_P (pvar));
14121 tree new_pvar = lookup_decl (pvar, ctx);
14122 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
14123 tree al = size_int (DECL_ALIGN (var));
14124 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
14125 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
14126 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
14127 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14128 gimple_seq_add_stmt (&new_body,
14129 gimple_build_assign (new_pvar, x));
14131 else if (omp_privatize_by_reference (var)
14132 && !is_gimple_omp_oacc (ctx->stmt))
14134 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14135 tree new_var = lookup_decl (var, ctx);
14136 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
14137 if (TREE_CONSTANT (x))
14138 break;
14139 else
14141 tree atmp
14142 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
14143 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
14144 tree al = size_int (TYPE_ALIGN (rtype));
14145 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
14148 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
14149 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14150 gimple_seq_add_stmt (&new_body,
14151 gimple_build_assign (new_var, x));
14153 break;
14156 gimple_seq fork_seq = NULL;
14157 gimple_seq join_seq = NULL;
14159 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
14161 /* If there are reductions on the offloaded region itself, treat
14162 them as a dummy GANG loop. */
14163 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
14165 gcall *private_marker = lower_oacc_private_marker (ctx);
14167 if (private_marker)
14168 gimple_call_set_arg (private_marker, 2, level);
14170 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
14171 false, NULL, private_marker, NULL, &fork_seq,
14172 &join_seq, ctx);
14175 gimple_seq_add_seq (&new_body, fork_seq);
14176 gimple_seq_add_seq (&new_body, tgt_body);
14177 gimple_seq_add_seq (&new_body, join_seq);
14179 if (offloaded)
14181 new_body = maybe_catch_exception (new_body);
14182 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
14184 gimple_omp_set_body (stmt, new_body);
14187 bind = gimple_build_bind (NULL, NULL,
14188 tgt_bind ? gimple_bind_block (tgt_bind)
14189 : NULL_TREE);
14190 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
14191 gimple_bind_add_seq (bind, ilist);
14192 gimple_bind_add_stmt (bind, stmt);
14193 gimple_bind_add_seq (bind, olist);
14195 pop_gimplify_context (NULL);
14197 if (dep_bind)
14199 gimple_bind_add_seq (dep_bind, dep_ilist);
14200 gimple_bind_add_stmt (dep_bind, bind);
14201 gimple_bind_add_seq (dep_bind, dep_olist);
14202 pop_gimplify_context (dep_bind);
14206 /* Expand code for an OpenMP teams directive. */
14208 static void
14209 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14211 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
14212 push_gimplify_context ();
14214 tree block = make_node (BLOCK);
14215 gbind *bind = gimple_build_bind (NULL, NULL, block);
14216 gsi_replace (gsi_p, bind, true);
14217 gimple_seq bind_body = NULL;
14218 gimple_seq dlist = NULL;
14219 gimple_seq olist = NULL;
14221 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
14222 OMP_CLAUSE_NUM_TEAMS);
14223 tree num_teams_lower = NULL_TREE;
14224 if (num_teams == NULL_TREE)
14225 num_teams = build_int_cst (unsigned_type_node, 0);
14226 else
14228 num_teams_lower = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams);
14229 if (num_teams_lower)
14231 num_teams_lower = fold_convert (unsigned_type_node, num_teams_lower);
14232 gimplify_expr (&num_teams_lower, &bind_body, NULL, is_gimple_val,
14233 fb_rvalue);
14235 num_teams = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams);
14236 num_teams = fold_convert (unsigned_type_node, num_teams);
14237 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
14239 if (num_teams_lower == NULL_TREE)
14240 num_teams_lower = num_teams;
14241 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
14242 OMP_CLAUSE_THREAD_LIMIT);
14243 if (thread_limit == NULL_TREE)
14244 thread_limit = build_int_cst (unsigned_type_node, 0);
14245 else
14247 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
14248 thread_limit = fold_convert (unsigned_type_node, thread_limit);
14249 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
14250 fb_rvalue);
14252 location_t loc = gimple_location (teams_stmt);
14253 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4);
14254 tree rettype = TREE_TYPE (TREE_TYPE (decl));
14255 tree first = create_tmp_var (rettype);
14256 gimple_seq_add_stmt (&bind_body,
14257 gimple_build_assign (first, build_one_cst (rettype)));
14258 tree llabel = create_artificial_label (loc);
14259 gimple_seq_add_stmt (&bind_body, gimple_build_label (llabel));
14260 gimple *call
14261 = gimple_build_call (decl, 4, num_teams_lower, num_teams, thread_limit,
14262 first);
14263 gimple_set_location (call, loc);
14264 tree temp = create_tmp_var (rettype);
14265 gimple_call_set_lhs (call, temp);
14266 gimple_seq_add_stmt (&bind_body, call);
14268 tree tlabel = create_artificial_label (loc);
14269 tree flabel = create_artificial_label (loc);
14270 gimple *cond = gimple_build_cond (NE_EXPR, temp, build_zero_cst (rettype),
14271 tlabel, flabel);
14272 gimple_seq_add_stmt (&bind_body, cond);
14273 gimple_seq_add_stmt (&bind_body, gimple_build_label (tlabel));
14274 gimple_seq_add_stmt (&bind_body,
14275 gimple_build_assign (first, build_zero_cst (rettype)));
14277 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
14278 &bind_body, &dlist, ctx, NULL);
14279 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
14280 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
14281 NULL, ctx);
14282 gimple_seq_add_stmt (&bind_body, teams_stmt);
14284 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
14285 gimple_omp_set_body (teams_stmt, NULL);
14286 gimple_seq_add_seq (&bind_body, olist);
14287 gimple_seq_add_seq (&bind_body, dlist);
14288 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
14289 gimple_seq_add_stmt (&bind_body, gimple_build_goto (llabel));
14290 gimple_seq_add_stmt (&bind_body, gimple_build_label (flabel));
14291 gimple_bind_set_body (bind, bind_body);
14293 pop_gimplify_context (bind);
14295 gimple_bind_append_vars (bind, ctx->block_vars);
14296 BLOCK_VARS (block) = ctx->block_vars;
14297 if (BLOCK_VARS (block))
14298 TREE_USED (block) = 1;
14301 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
14302 regimplified. If DATA is non-NULL, lower_omp_1 is outside
14303 of OMP context, but with make_addressable_vars set. */
14305 static tree
14306 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
14307 void *data)
14309 tree t = *tp;
14311 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
14312 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
14313 && data == NULL
14314 && DECL_HAS_VALUE_EXPR_P (t))
14315 return t;
14317 if (make_addressable_vars
14318 && DECL_P (t)
14319 && bitmap_bit_p (make_addressable_vars, DECL_UID (t)))
14320 return t;
14322 /* If a global variable has been privatized, TREE_CONSTANT on
14323 ADDR_EXPR might be wrong. */
14324 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
14325 recompute_tree_invariant_for_addr_expr (t);
14327 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
14328 return NULL_TREE;
14331 /* Data to be communicated between lower_omp_regimplify_operands and
14332 lower_omp_regimplify_operands_p. */
14334 struct lower_omp_regimplify_operands_data
14336 omp_context *ctx;
14337 vec<tree> *decls;
14340 /* Helper function for lower_omp_regimplify_operands. Find
14341 omp_member_access_dummy_var vars and adjust temporarily their
14342 DECL_VALUE_EXPRs if needed. */
14344 static tree
14345 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
14346 void *data)
14348 tree t = omp_member_access_dummy_var (*tp);
14349 if (t)
14351 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
14352 lower_omp_regimplify_operands_data *ldata
14353 = (lower_omp_regimplify_operands_data *) wi->info;
14354 tree o = maybe_lookup_decl (t, ldata->ctx);
14355 if (o != t)
14357 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
14358 ldata->decls->safe_push (*tp);
14359 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
14360 SET_DECL_VALUE_EXPR (*tp, v);
14363 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
14364 return NULL_TREE;
14367 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
14368 of omp_member_access_dummy_var vars during regimplification. */
14370 static void
14371 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
14372 gimple_stmt_iterator *gsi_p)
14374 auto_vec<tree, 10> decls;
14375 if (ctx)
14377 struct walk_stmt_info wi;
14378 memset (&wi, '\0', sizeof (wi));
14379 struct lower_omp_regimplify_operands_data data;
14380 data.ctx = ctx;
14381 data.decls = &decls;
14382 wi.info = &data;
14383 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
14385 gimple_regimplify_operands (stmt, gsi_p);
14386 while (!decls.is_empty ())
14388 tree t = decls.pop ();
14389 tree v = decls.pop ();
14390 SET_DECL_VALUE_EXPR (t, v);
14394 static void
14395 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14397 gimple *stmt = gsi_stmt (*gsi_p);
14398 struct walk_stmt_info wi;
14399 gcall *call_stmt;
14401 if (gimple_has_location (stmt))
14402 input_location = gimple_location (stmt);
14404 if (make_addressable_vars)
14405 memset (&wi, '\0', sizeof (wi));
14407 /* If we have issued syntax errors, avoid doing any heavy lifting.
14408 Just replace the OMP directives with a NOP to avoid
14409 confusing RTL expansion. */
14410 if (seen_error () && is_gimple_omp (stmt))
14412 gsi_replace (gsi_p, gimple_build_nop (), true);
14413 return;
14416 switch (gimple_code (stmt))
14418 case GIMPLE_COND:
14420 gcond *cond_stmt = as_a <gcond *> (stmt);
14421 if ((ctx || make_addressable_vars)
14422 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
14423 lower_omp_regimplify_p,
14424 ctx ? NULL : &wi, NULL)
14425 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
14426 lower_omp_regimplify_p,
14427 ctx ? NULL : &wi, NULL)))
14428 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
14430 break;
14431 case GIMPLE_CATCH:
14432 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
14433 break;
14434 case GIMPLE_EH_FILTER:
14435 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
14436 break;
14437 case GIMPLE_TRY:
14438 lower_omp (gimple_try_eval_ptr (stmt), ctx);
14439 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
14440 break;
14441 case GIMPLE_ASSUME:
14442 lower_omp (gimple_assume_body_ptr (stmt), ctx);
14443 break;
14444 case GIMPLE_TRANSACTION:
14445 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
14446 ctx);
14447 break;
14448 case GIMPLE_BIND:
14449 if (ctx && is_gimple_omp_oacc (ctx->stmt))
14451 tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
14452 oacc_privatization_scan_decl_chain (ctx, vars);
14454 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
14455 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
14456 break;
14457 case GIMPLE_OMP_PARALLEL:
14458 case GIMPLE_OMP_TASK:
14459 ctx = maybe_lookup_ctx (stmt);
14460 gcc_assert (ctx);
14461 if (ctx->cancellable)
14462 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14463 lower_omp_taskreg (gsi_p, ctx);
14464 break;
14465 case GIMPLE_OMP_FOR:
14466 ctx = maybe_lookup_ctx (stmt);
14467 gcc_assert (ctx);
14468 if (ctx->cancellable)
14469 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14470 lower_omp_for (gsi_p, ctx);
14471 break;
14472 case GIMPLE_OMP_SECTIONS:
14473 ctx = maybe_lookup_ctx (stmt);
14474 gcc_assert (ctx);
14475 if (ctx->cancellable)
14476 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14477 lower_omp_sections (gsi_p, ctx);
14478 break;
14479 case GIMPLE_OMP_SCOPE:
14480 ctx = maybe_lookup_ctx (stmt);
14481 gcc_assert (ctx);
14482 lower_omp_scope (gsi_p, ctx);
14483 break;
14484 case GIMPLE_OMP_SINGLE:
14485 ctx = maybe_lookup_ctx (stmt);
14486 gcc_assert (ctx);
14487 lower_omp_single (gsi_p, ctx);
14488 break;
14489 case GIMPLE_OMP_MASTER:
14490 case GIMPLE_OMP_MASKED:
14491 ctx = maybe_lookup_ctx (stmt);
14492 gcc_assert (ctx);
14493 lower_omp_master (gsi_p, ctx);
14494 break;
14495 case GIMPLE_OMP_TASKGROUP:
14496 ctx = maybe_lookup_ctx (stmt);
14497 gcc_assert (ctx);
14498 lower_omp_taskgroup (gsi_p, ctx);
14499 break;
14500 case GIMPLE_OMP_ORDERED:
14501 ctx = maybe_lookup_ctx (stmt);
14502 gcc_assert (ctx);
14503 lower_omp_ordered (gsi_p, ctx);
14504 break;
14505 case GIMPLE_OMP_SCAN:
14506 ctx = maybe_lookup_ctx (stmt);
14507 gcc_assert (ctx);
14508 lower_omp_scan (gsi_p, ctx);
14509 break;
14510 case GIMPLE_OMP_CRITICAL:
14511 ctx = maybe_lookup_ctx (stmt);
14512 gcc_assert (ctx);
14513 lower_omp_critical (gsi_p, ctx);
14514 break;
14515 case GIMPLE_OMP_ATOMIC_LOAD:
14516 if ((ctx || make_addressable_vars)
14517 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14518 as_a <gomp_atomic_load *> (stmt)),
14519 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
14520 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14521 break;
14522 case GIMPLE_OMP_TARGET:
14523 ctx = maybe_lookup_ctx (stmt);
14524 gcc_assert (ctx);
14525 lower_omp_target (gsi_p, ctx);
14526 break;
14527 case GIMPLE_OMP_TEAMS:
14528 ctx = maybe_lookup_ctx (stmt);
14529 gcc_assert (ctx);
14530 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
14531 lower_omp_taskreg (gsi_p, ctx);
14532 else
14533 lower_omp_teams (gsi_p, ctx);
14534 break;
14535 case GIMPLE_CALL:
14536 tree fndecl;
14537 call_stmt = as_a <gcall *> (stmt);
14538 fndecl = gimple_call_fndecl (call_stmt);
14539 if (fndecl
14540 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14541 switch (DECL_FUNCTION_CODE (fndecl))
14543 case BUILT_IN_GOMP_BARRIER:
14544 if (ctx == NULL)
14545 break;
14546 /* FALLTHRU */
14547 case BUILT_IN_GOMP_CANCEL:
14548 case BUILT_IN_GOMP_CANCELLATION_POINT:
14549 omp_context *cctx;
14550 cctx = ctx;
14551 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
14552 cctx = cctx->outer;
14553 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
14554 if (!cctx->cancellable)
14556 if (DECL_FUNCTION_CODE (fndecl)
14557 == BUILT_IN_GOMP_CANCELLATION_POINT)
14559 stmt = gimple_build_nop ();
14560 gsi_replace (gsi_p, stmt, false);
14562 break;
14564 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
14566 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
14567 gimple_call_set_fndecl (call_stmt, fndecl);
14568 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
14570 tree lhs;
14571 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
14572 gimple_call_set_lhs (call_stmt, lhs);
14573 tree fallthru_label;
14574 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
14575 gimple *g;
14576 g = gimple_build_label (fallthru_label);
14577 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14578 g = gimple_build_cond (NE_EXPR, lhs,
14579 fold_convert (TREE_TYPE (lhs),
14580 boolean_false_node),
14581 cctx->cancel_label, fallthru_label);
14582 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14583 break;
14584 default:
14585 break;
14587 goto regimplify;
14589 case GIMPLE_ASSIGN:
14590 for (omp_context *up = ctx; up; up = up->outer)
14592 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
14593 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
14594 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
14595 || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
14596 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
14597 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
14598 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
14599 && (gimple_omp_target_kind (up->stmt)
14600 == GF_OMP_TARGET_KIND_DATA)))
14601 continue;
14602 else if (!up->lastprivate_conditional_map)
14603 break;
14604 tree lhs = get_base_address (gimple_assign_lhs (stmt));
14605 if (TREE_CODE (lhs) == MEM_REF
14606 && DECL_P (TREE_OPERAND (lhs, 0))
14607 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
14608 0))) == REFERENCE_TYPE)
14609 lhs = TREE_OPERAND (lhs, 0);
14610 if (DECL_P (lhs))
14611 if (tree *v = up->lastprivate_conditional_map->get (lhs))
14613 tree clauses;
14614 if (up->combined_into_simd_safelen1)
14616 up = up->outer;
14617 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
14618 up = up->outer;
14620 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
14621 clauses = gimple_omp_for_clauses (up->stmt);
14622 else
14623 clauses = gimple_omp_sections_clauses (up->stmt);
14624 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
14625 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
14626 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14627 OMP_CLAUSE__CONDTEMP_);
14628 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
14629 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
14630 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14633 /* FALLTHRU */
14635 default:
14636 regimplify:
14637 if ((ctx || make_addressable_vars)
14638 && walk_gimple_op (stmt, lower_omp_regimplify_p,
14639 ctx ? NULL : &wi))
14641 /* Just remove clobbers, this should happen only if we have
14642 "privatized" local addressable variables in SIMD regions,
14643 the clobber isn't needed in that case and gimplifying address
14644 of the ARRAY_REF into a pointer and creating MEM_REF based
14645 clobber would create worse code than we get with the clobber
14646 dropped. */
14647 if (gimple_clobber_p (stmt))
14649 gsi_replace (gsi_p, gimple_build_nop (), true);
14650 break;
14652 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14654 break;
14658 static void
14659 lower_omp (gimple_seq *body, omp_context *ctx)
14661 location_t saved_location = input_location;
14662 gimple_stmt_iterator gsi;
14663 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14664 lower_omp_1 (&gsi, ctx);
14665 /* During gimplification, we haven't folded statments inside offloading
14666 or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now. */
14667 if (target_nesting_level || taskreg_nesting_level)
14668 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14669 fold_stmt (&gsi);
14670 input_location = saved_location;
14673 /* Main entry point. */
14675 static unsigned int
14676 execute_lower_omp (void)
14678 gimple_seq body;
14679 int i;
14680 omp_context *ctx;
14682 /* This pass always runs, to provide PROP_gimple_lomp.
14683 But often, there is nothing to do. */
14684 if (flag_openacc == 0 && flag_openmp == 0
14685 && flag_openmp_simd == 0)
14686 return 0;
14688 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
14689 delete_omp_context);
14691 body = gimple_body (current_function_decl);
14693 scan_omp (&body, NULL);
14694 gcc_assert (taskreg_nesting_level == 0);
14695 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
14696 finish_taskreg_scan (ctx);
14697 taskreg_contexts.release ();
14699 if (all_contexts->root)
14701 if (make_addressable_vars)
14702 push_gimplify_context ();
14703 lower_omp (&body, NULL);
14704 if (make_addressable_vars)
14705 pop_gimplify_context (NULL);
14708 if (all_contexts)
14710 splay_tree_delete (all_contexts);
14711 all_contexts = NULL;
14713 BITMAP_FREE (make_addressable_vars);
14714 BITMAP_FREE (global_nonaddressable_vars);
14716 /* If current function is a method, remove artificial dummy VAR_DECL created
14717 for non-static data member privatization, they aren't needed for
14718 debuginfo nor anything else, have been already replaced everywhere in the
14719 IL and cause problems with LTO. */
14720 if (DECL_ARGUMENTS (current_function_decl)
14721 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
14722 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
14723 == POINTER_TYPE))
14724 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
14726 for (auto task_stmt : task_cpyfns)
14727 finalize_task_copyfn (task_stmt);
14728 task_cpyfns.release ();
14729 return 0;
14732 namespace {
14734 const pass_data pass_data_lower_omp =
14736 GIMPLE_PASS, /* type */
14737 "omplower", /* name */
14738 OPTGROUP_OMP, /* optinfo_flags */
14739 TV_NONE, /* tv_id */
14740 PROP_gimple_any, /* properties_required */
14741 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
14742 0, /* properties_destroyed */
14743 0, /* todo_flags_start */
14744 0, /* todo_flags_finish */
14747 class pass_lower_omp : public gimple_opt_pass
14749 public:
14750 pass_lower_omp (gcc::context *ctxt)
14751 : gimple_opt_pass (pass_data_lower_omp, ctxt)
14754 /* opt_pass methods: */
14755 unsigned int execute (function *) final override
14757 return execute_lower_omp ();
14760 }; // class pass_lower_omp
14762 } // anon namespace
14764 gimple_opt_pass *
14765 make_pass_lower_omp (gcc::context *ctxt)
14767 return new pass_lower_omp (ctxt);
14770 /* The following is a utility to diagnose structured block violations.
14771 It is not part of the "omplower" pass, as that's invoked too late. It
14772 should be invoked by the respective front ends after gimplification. */
14774 static splay_tree all_labels;
14776 /* Check for mismatched contexts and generate an error if needed. Return
14777 true if an error is detected. */
14779 static bool
14780 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
14781 gimple *branch_ctx, gimple *label_ctx)
14783 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
14784 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
14786 if (label_ctx == branch_ctx)
14787 return false;
14789 const char* kind = NULL;
14791 if (flag_openacc)
14793 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
14794 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
14796 gcc_checking_assert (kind == NULL);
14797 kind = "OpenACC";
14800 if (kind == NULL)
14802 gcc_checking_assert (flag_openmp || flag_openmp_simd);
14803 kind = "OpenMP";
14806 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14807 so we could traverse it and issue a correct "exit" or "enter" error
14808 message upon a structured block violation.
14810 We built the context by building a list with tree_cons'ing, but there is
14811 no easy counterpart in gimple tuples. It seems like far too much work
14812 for issuing exit/enter error messages. If someone really misses the
14813 distinct error message... patches welcome. */
14815 #if 0
14816 /* Try to avoid confusing the user by producing and error message
14817 with correct "exit" or "enter" verbiage. We prefer "exit"
14818 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14819 if (branch_ctx == NULL)
14820 exit_p = false;
14821 else
14823 while (label_ctx)
14825 if (TREE_VALUE (label_ctx) == branch_ctx)
14827 exit_p = false;
14828 break;
14830 label_ctx = TREE_CHAIN (label_ctx);
14834 if (exit_p)
14835 error ("invalid exit from %s structured block", kind);
14836 else
14837 error ("invalid entry to %s structured block", kind);
14838 #endif
14840 /* If it's obvious we have an invalid entry, be specific about the error. */
14841 if (branch_ctx == NULL)
14842 error ("invalid entry to %s structured block", kind);
14843 else
14845 /* Otherwise, be vague and lazy, but efficient. */
14846 error ("invalid branch to/from %s structured block", kind);
14849 gsi_replace (gsi_p, gimple_build_nop (), false);
14850 return true;
14853 /* Pass 1: Create a minimal tree of structured blocks, and record
14854 where each label is found. */
14856 static tree
14857 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14858 struct walk_stmt_info *wi)
14860 gimple *context = (gimple *) wi->info;
14861 gimple *inner_context;
14862 gimple *stmt = gsi_stmt (*gsi_p);
14864 *handled_ops_p = true;
14866 switch (gimple_code (stmt))
14868 WALK_SUBSTMTS;
14870 case GIMPLE_OMP_PARALLEL:
14871 case GIMPLE_OMP_TASK:
14872 case GIMPLE_OMP_SCOPE:
14873 case GIMPLE_OMP_SECTIONS:
14874 case GIMPLE_OMP_SINGLE:
14875 case GIMPLE_OMP_SECTION:
14876 case GIMPLE_OMP_MASTER:
14877 case GIMPLE_OMP_MASKED:
14878 case GIMPLE_OMP_ORDERED:
14879 case GIMPLE_OMP_SCAN:
14880 case GIMPLE_OMP_CRITICAL:
14881 case GIMPLE_OMP_TARGET:
14882 case GIMPLE_OMP_TEAMS:
14883 case GIMPLE_OMP_TASKGROUP:
14884 /* The minimal context here is just the current OMP construct. */
14885 inner_context = stmt;
14886 wi->info = inner_context;
14887 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14888 wi->info = context;
14889 break;
14891 case GIMPLE_OMP_FOR:
14892 inner_context = stmt;
14893 wi->info = inner_context;
14894 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14895 walk them. */
14896 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
14897 diagnose_sb_1, NULL, wi);
14898 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14899 wi->info = context;
14900 break;
14902 case GIMPLE_LABEL:
14903 splay_tree_insert (all_labels,
14904 (splay_tree_key) gimple_label_label (
14905 as_a <glabel *> (stmt)),
14906 (splay_tree_value) context);
14907 break;
14909 default:
14910 break;
14913 return NULL_TREE;
14916 /* Pass 2: Check each branch and see if its context differs from that of
14917 the destination label's context. */
14919 static tree
14920 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14921 struct walk_stmt_info *wi)
14923 gimple *context = (gimple *) wi->info;
14924 splay_tree_node n;
14925 gimple *stmt = gsi_stmt (*gsi_p);
14927 *handled_ops_p = true;
14929 switch (gimple_code (stmt))
14931 WALK_SUBSTMTS;
14933 case GIMPLE_OMP_PARALLEL:
14934 case GIMPLE_OMP_TASK:
14935 case GIMPLE_OMP_SCOPE:
14936 case GIMPLE_OMP_SECTIONS:
14937 case GIMPLE_OMP_SINGLE:
14938 case GIMPLE_OMP_SECTION:
14939 case GIMPLE_OMP_MASTER:
14940 case GIMPLE_OMP_MASKED:
14941 case GIMPLE_OMP_ORDERED:
14942 case GIMPLE_OMP_SCAN:
14943 case GIMPLE_OMP_CRITICAL:
14944 case GIMPLE_OMP_TARGET:
14945 case GIMPLE_OMP_TEAMS:
14946 case GIMPLE_OMP_TASKGROUP:
14947 wi->info = stmt;
14948 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14949 wi->info = context;
14950 break;
14952 case GIMPLE_OMP_FOR:
14953 wi->info = stmt;
14954 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14955 walk them. */
14956 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
14957 diagnose_sb_2, NULL, wi);
14958 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14959 wi->info = context;
14960 break;
14962 case GIMPLE_COND:
14964 gcond *cond_stmt = as_a <gcond *> (stmt);
14965 tree lab = gimple_cond_true_label (cond_stmt);
14966 if (lab)
14968 n = splay_tree_lookup (all_labels,
14969 (splay_tree_key) lab);
14970 diagnose_sb_0 (gsi_p, context,
14971 n ? (gimple *) n->value : NULL);
14973 lab = gimple_cond_false_label (cond_stmt);
14974 if (lab)
14976 n = splay_tree_lookup (all_labels,
14977 (splay_tree_key) lab);
14978 diagnose_sb_0 (gsi_p, context,
14979 n ? (gimple *) n->value : NULL);
14982 break;
14984 case GIMPLE_GOTO:
14986 tree lab = gimple_goto_dest (stmt);
14987 if (TREE_CODE (lab) != LABEL_DECL)
14988 break;
14990 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14991 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
14993 break;
14995 case GIMPLE_SWITCH:
14997 gswitch *switch_stmt = as_a <gswitch *> (stmt);
14998 unsigned int i;
14999 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
15001 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
15002 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
15003 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
15004 break;
15007 break;
15009 case GIMPLE_RETURN:
15010 diagnose_sb_0 (gsi_p, context, NULL);
15011 break;
15013 default:
15014 break;
15017 return NULL_TREE;
15020 static unsigned int
15021 diagnose_omp_structured_block_errors (void)
15023 struct walk_stmt_info wi;
15024 gimple_seq body = gimple_body (current_function_decl);
15026 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
15028 memset (&wi, 0, sizeof (wi));
15029 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
15031 memset (&wi, 0, sizeof (wi));
15032 wi.want_locations = true;
15033 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
15035 gimple_set_body (current_function_decl, body);
15037 splay_tree_delete (all_labels);
15038 all_labels = NULL;
15040 return 0;
15043 namespace {
15045 const pass_data pass_data_diagnose_omp_blocks =
15047 GIMPLE_PASS, /* type */
15048 "*diagnose_omp_blocks", /* name */
15049 OPTGROUP_OMP, /* optinfo_flags */
15050 TV_NONE, /* tv_id */
15051 PROP_gimple_any, /* properties_required */
15052 0, /* properties_provided */
15053 0, /* properties_destroyed */
15054 0, /* todo_flags_start */
15055 0, /* todo_flags_finish */
15058 class pass_diagnose_omp_blocks : public gimple_opt_pass
15060 public:
15061 pass_diagnose_omp_blocks (gcc::context *ctxt)
15062 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
15065 /* opt_pass methods: */
15066 bool gate (function *) final override
15068 return flag_openacc || flag_openmp || flag_openmp_simd;
15070 unsigned int execute (function *) final override
15072 return diagnose_omp_structured_block_errors ();
15075 }; // class pass_diagnose_omp_blocks
15077 } // anon namespace
15079 gimple_opt_pass *
15080 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
15082 return new pass_diagnose_omp_blocks (ctxt);
15086 #include "gt-omp-low.h"