c++: fix cxx_print_type's template-info dumping
[official-gcc.git] / gcc / omp-low.cc
blob5d7c32dac39fa83fcfac27bc8d76116367e88475
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2023 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
42 #include "gimplify.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "omp-offload.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
78 struct omp_context
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.cc (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
84 copy_body_data cb;
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context *outer;
88 gimple *stmt;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
92 splay_tree field_map;
93 tree record_type;
94 tree sender_decl;
95 tree receiver_decl;
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map;
103 tree srecord_type;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
107 tree block_vars;
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
111 tree cancel_label;
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
114 otherwise. */
115 gimple *simt_stmt;
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec<tree> task_reductions;
122 /* A hash map from the reduction clauses to the registered array
123 elts. */
124 hash_map<tree, unsigned> *task_reduction_map;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map<tree, tree> *lastprivate_conditional_map;
130 /* And a hash map from the allocate variables to their corresponding
131 allocators. */
132 hash_map<tree, tree> *allocate_map;
134 /* A tree_list of the reduction clauses in this context. This is
135 only used for checking the consistency of OpenACC reduction
136 clauses in scan_omp_for and is not guaranteed to contain a valid
137 value outside of this function. */
138 tree local_reduction_clauses;
140 /* A tree_list of the reduction clauses in outer contexts. This is
141 only used for checking the consistency of OpenACC reduction
142 clauses in scan_omp_for and is not guaranteed to contain a valid
143 value outside of this function. */
144 tree outer_reduction_clauses;
146 /* Nesting depth of this context. Used to beautify error messages re
147 invalid gotos. The outermost ctx is depth 1, with depth 0 being
148 reserved for the main body of the function. */
149 int depth;
151 /* True if this parallel directive is nested within another. */
152 bool is_nested;
154 /* True if this construct can be cancelled. */
155 bool cancellable;
157 /* True if lower_omp_1 should look up lastprivate conditional in parent
158 context. */
159 bool combined_into_simd_safelen1;
161 /* True if there is nested scan context with inclusive clause. */
162 bool scan_inclusive;
164 /* True if there is nested scan context with exclusive clause. */
165 bool scan_exclusive;
167 /* True in the second simd loop of for simd with inscan reductions. */
168 bool for_simd_scan_phase;
170 /* True if there is order(concurrent) clause on the construct. */
171 bool order_concurrent;
173 /* True if there is bind clause on the construct (i.e. a loop construct). */
174 bool loop_p;
176 /* Only used for omp target contexts. True if a teams construct is
177 strictly nested in it. */
178 bool teams_nested_p;
180 /* Only used for omp target contexts. True if an OpenMP construct other
181 than teams is strictly nested in it. */
182 bool nonteams_nested_p;
184 /* Candidates for adjusting OpenACC privatization level. */
185 vec<tree> oacc_privatization_candidates;
188 static splay_tree all_contexts;
189 static int taskreg_nesting_level;
190 static int target_nesting_level;
191 static bitmap make_addressable_vars;
192 static bitmap global_nonaddressable_vars;
193 static vec<omp_context *> taskreg_contexts;
194 static vec<gomp_task *> task_cpyfns;
196 static void scan_omp (gimple_seq *, omp_context *);
197 static tree scan_omp_1_op (tree *, int *, void *);
198 static bool omp_maybe_offloaded_ctx (omp_context *ctx);
200 #define WALK_SUBSTMTS \
201 case GIMPLE_BIND: \
202 case GIMPLE_TRY: \
203 case GIMPLE_CATCH: \
204 case GIMPLE_EH_FILTER: \
205 case GIMPLE_ASSUME: \
206 case GIMPLE_TRANSACTION: \
207 /* The sub-statements for these should be walked. */ \
208 *handled_ops_p = false; \
209 break;
211 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
212 (This doesn't include OpenACC 'kernels' decomposed parts.) */
214 static bool
215 is_oacc_parallel_or_serial (omp_context *ctx)
217 enum gimple_code outer_type = gimple_code (ctx->stmt);
218 return ((outer_type == GIMPLE_OMP_TARGET)
219 && ((gimple_omp_target_kind (ctx->stmt)
220 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
221 || (gimple_omp_target_kind (ctx->stmt)
222 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
225 /* Return whether CTX represents an OpenACC 'kernels' construct.
226 (This doesn't include OpenACC 'kernels' decomposed parts.) */
228 static bool
229 is_oacc_kernels (omp_context *ctx)
231 enum gimple_code outer_type = gimple_code (ctx->stmt);
232 return ((outer_type == GIMPLE_OMP_TARGET)
233 && (gimple_omp_target_kind (ctx->stmt)
234 == GF_OMP_TARGET_KIND_OACC_KERNELS));
237 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
239 static bool
240 is_oacc_kernels_decomposed_part (omp_context *ctx)
242 enum gimple_code outer_type = gimple_code (ctx->stmt);
243 return ((outer_type == GIMPLE_OMP_TARGET)
244 && ((gimple_omp_target_kind (ctx->stmt)
245 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
246 || (gimple_omp_target_kind (ctx->stmt)
247 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
248 || (gimple_omp_target_kind (ctx->stmt)
249 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
252 /* Return true if STMT corresponds to an OpenMP target region. */
253 static bool
254 is_omp_target (gimple *stmt)
256 if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
258 int kind = gimple_omp_target_kind (stmt);
259 return (kind == GF_OMP_TARGET_KIND_REGION
260 || kind == GF_OMP_TARGET_KIND_DATA
261 || kind == GF_OMP_TARGET_KIND_ENTER_DATA
262 || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
264 return false;
267 /* If DECL is the artificial dummy VAR_DECL created for non-static
268 data member privatization, return the underlying "this" parameter,
269 otherwise return NULL. */
271 tree
272 omp_member_access_dummy_var (tree decl)
274 if (!VAR_P (decl)
275 || !DECL_ARTIFICIAL (decl)
276 || !DECL_IGNORED_P (decl)
277 || !DECL_HAS_VALUE_EXPR_P (decl)
278 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
279 return NULL_TREE;
281 tree v = DECL_VALUE_EXPR (decl);
282 if (TREE_CODE (v) != COMPONENT_REF)
283 return NULL_TREE;
285 while (1)
286 switch (TREE_CODE (v))
288 case COMPONENT_REF:
289 case MEM_REF:
290 case INDIRECT_REF:
291 CASE_CONVERT:
292 case POINTER_PLUS_EXPR:
293 v = TREE_OPERAND (v, 0);
294 continue;
295 case PARM_DECL:
296 if (DECL_CONTEXT (v) == current_function_decl
297 && DECL_ARTIFICIAL (v)
298 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
299 return v;
300 return NULL_TREE;
301 default:
302 return NULL_TREE;
306 /* Helper for unshare_and_remap, called through walk_tree. */
308 static tree
309 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
311 tree *pair = (tree *) data;
312 if (*tp == pair[0])
314 *tp = unshare_expr (pair[1]);
315 *walk_subtrees = 0;
317 else if (IS_TYPE_OR_DECL_P (*tp))
318 *walk_subtrees = 0;
319 return NULL_TREE;
322 /* Return unshare_expr (X) with all occurrences of FROM
323 replaced with TO. */
325 static tree
326 unshare_and_remap (tree x, tree from, tree to)
328 tree pair[2] = { from, to };
329 x = unshare_expr (x);
330 walk_tree (&x, unshare_and_remap_1, pair, NULL);
331 return x;
334 /* Convenience function for calling scan_omp_1_op on tree operands. */
336 static inline tree
337 scan_omp_op (tree *tp, omp_context *ctx)
339 struct walk_stmt_info wi;
341 memset (&wi, 0, sizeof (wi));
342 wi.info = ctx;
343 wi.want_locations = true;
345 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
348 static void lower_omp (gimple_seq *, omp_context *);
349 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
350 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
352 /* Return true if CTX is for an omp parallel. */
354 static inline bool
355 is_parallel_ctx (omp_context *ctx)
357 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
361 /* Return true if CTX is for an omp task. */
363 static inline bool
364 is_task_ctx (omp_context *ctx)
366 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
370 /* Return true if CTX is for an omp taskloop. */
372 static inline bool
373 is_taskloop_ctx (omp_context *ctx)
375 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
376 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
380 /* Return true if CTX is for a host omp teams. */
382 static inline bool
383 is_host_teams_ctx (omp_context *ctx)
385 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
386 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
389 /* Return true if CTX is for an omp parallel or omp task or host omp teams
390 (the last one is strictly not a task region in OpenMP speak, but we
391 need to treat it similarly). */
393 static inline bool
394 is_taskreg_ctx (omp_context *ctx)
396 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
399 /* Return true if EXPR is variable sized. */
401 static inline bool
402 is_variable_sized (const_tree expr)
404 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
407 /* Lookup variables. The "maybe" form
408 allows for the variable form to not have been entered, otherwise we
409 assert that the variable must have been entered. */
411 static inline tree
412 lookup_decl (tree var, omp_context *ctx)
414 tree *n = ctx->cb.decl_map->get (var);
415 return *n;
418 static inline tree
419 maybe_lookup_decl (const_tree var, omp_context *ctx)
421 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
422 return n ? *n : NULL_TREE;
425 static inline tree
426 lookup_field (tree var, omp_context *ctx)
428 splay_tree_node n;
429 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
430 return (tree) n->value;
433 static inline tree
434 lookup_sfield (splay_tree_key key, omp_context *ctx)
436 splay_tree_node n;
437 n = splay_tree_lookup (ctx->sfield_map
438 ? ctx->sfield_map : ctx->field_map, key);
439 return (tree) n->value;
442 static inline tree
443 lookup_sfield (tree var, omp_context *ctx)
445 return lookup_sfield ((splay_tree_key) var, ctx);
448 static inline tree
449 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
451 splay_tree_node n;
452 n = splay_tree_lookup (ctx->field_map, key);
453 return n ? (tree) n->value : NULL_TREE;
456 static inline tree
457 maybe_lookup_field (tree var, omp_context *ctx)
459 return maybe_lookup_field ((splay_tree_key) var, ctx);
462 /* Return true if DECL should be copied by pointer. SHARED_CTX is
463 the parallel context if DECL is to be shared. */
465 static bool
466 use_pointer_for_field (tree decl, omp_context *shared_ctx)
468 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
469 || TYPE_ATOMIC (TREE_TYPE (decl)))
470 return true;
472 /* We can only use copy-in/copy-out semantics for shared variables
473 when we know the value is not accessible from an outer scope. */
474 if (shared_ctx)
476 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
478 /* ??? Trivially accessible from anywhere. But why would we even
479 be passing an address in this case? Should we simply assert
480 this to be false, or should we have a cleanup pass that removes
481 these from the list of mappings? */
482 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
483 return true;
485 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
486 without analyzing the expression whether or not its location
487 is accessible to anyone else. In the case of nested parallel
488 regions it certainly may be. */
489 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
490 return true;
492 /* Do not use copy-in/copy-out for variables that have their
493 address taken. */
494 if (is_global_var (decl))
496 /* For file scope vars, track whether we've seen them as
497 non-addressable initially and in that case, keep the same
498 answer for the duration of the pass, even when they are made
499 addressable later on e.g. through reduction expansion. Global
500 variables which weren't addressable before the pass will not
501 have their privatized copies address taken. See PR91216. */
502 if (!TREE_ADDRESSABLE (decl))
504 if (!global_nonaddressable_vars)
505 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
506 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
508 else if (!global_nonaddressable_vars
509 || !bitmap_bit_p (global_nonaddressable_vars,
510 DECL_UID (decl)))
511 return true;
513 else if (TREE_ADDRESSABLE (decl))
514 return true;
516 /* lower_send_shared_vars only uses copy-in, but not copy-out
517 for these. */
518 if (TREE_READONLY (decl)
519 || ((TREE_CODE (decl) == RESULT_DECL
520 || TREE_CODE (decl) == PARM_DECL)
521 && DECL_BY_REFERENCE (decl)))
522 return false;
524 /* Disallow copy-in/out in nested parallel if
525 decl is shared in outer parallel, otherwise
526 each thread could store the shared variable
527 in its own copy-in location, making the
528 variable no longer really shared. */
529 if (shared_ctx->is_nested)
531 omp_context *up;
533 for (up = shared_ctx->outer; up; up = up->outer)
534 if ((is_taskreg_ctx (up)
535 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
536 && is_gimple_omp_offloaded (up->stmt)))
537 && maybe_lookup_decl (decl, up))
538 break;
540 if (up)
542 tree c;
544 if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
546 for (c = gimple_omp_target_clauses (up->stmt);
547 c; c = OMP_CLAUSE_CHAIN (c))
548 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
549 && OMP_CLAUSE_DECL (c) == decl)
550 break;
552 else
553 for (c = gimple_omp_taskreg_clauses (up->stmt);
554 c; c = OMP_CLAUSE_CHAIN (c))
555 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
556 && OMP_CLAUSE_DECL (c) == decl)
557 break;
559 if (c)
560 goto maybe_mark_addressable_and_ret;
564 /* For tasks avoid using copy-in/out. As tasks can be
565 deferred or executed in different thread, when GOMP_task
566 returns, the task hasn't necessarily terminated. */
567 if (is_task_ctx (shared_ctx))
569 tree outer;
570 maybe_mark_addressable_and_ret:
571 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
572 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
574 /* Taking address of OUTER in lower_send_shared_vars
575 might need regimplification of everything that uses the
576 variable. */
577 if (!make_addressable_vars)
578 make_addressable_vars = BITMAP_ALLOC (NULL);
579 bitmap_set_bit (make_addressable_vars, DECL_UID (outer));
580 TREE_ADDRESSABLE (outer) = 1;
582 return true;
586 return false;
589 /* Construct a new automatic decl similar to VAR. */
591 static tree
592 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
594 tree copy = copy_var_decl (var, name, type);
596 DECL_CONTEXT (copy) = current_function_decl;
598 if (ctx)
600 DECL_CHAIN (copy) = ctx->block_vars;
601 ctx->block_vars = copy;
603 else
604 record_vars (copy);
606 /* If VAR is listed in make_addressable_vars, it wasn't
607 originally addressable, but was only later made so.
608 We don't need to take address of privatizations
609 from that var. */
610 if (TREE_ADDRESSABLE (var)
611 && ((make_addressable_vars
612 && bitmap_bit_p (make_addressable_vars, DECL_UID (var)))
613 || (global_nonaddressable_vars
614 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
615 TREE_ADDRESSABLE (copy) = 0;
617 return copy;
620 static tree
621 omp_copy_decl_1 (tree var, omp_context *ctx)
623 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
626 /* Build tree nodes to access the field for VAR on the receiver side. */
628 static tree
629 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
631 tree x, field = lookup_field (var, ctx);
633 /* If the receiver record type was remapped in the child function,
634 remap the field into the new record type. */
635 x = maybe_lookup_field (field, ctx);
636 if (x != NULL)
637 field = x;
639 x = build_simple_mem_ref (ctx->receiver_decl);
640 TREE_THIS_NOTRAP (x) = 1;
641 x = omp_build_component_ref (x, field);
642 if (by_ref)
644 x = build_simple_mem_ref (x);
645 TREE_THIS_NOTRAP (x) = 1;
648 return x;
651 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
652 of a parallel, this is a component reference; for workshare constructs
653 this is some variable. */
655 static tree
656 build_outer_var_ref (tree var, omp_context *ctx,
657 enum omp_clause_code code = OMP_CLAUSE_ERROR)
659 tree x;
660 omp_context *outer = ctx->outer;
661 for (; outer; outer = outer->outer)
663 if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
664 continue;
665 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
666 && !maybe_lookup_decl (var, outer))
667 continue;
668 break;
671 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
672 x = var;
673 else if (is_variable_sized (var))
675 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
676 x = build_outer_var_ref (x, ctx, code);
677 x = build_simple_mem_ref (x);
679 else if (is_taskreg_ctx (ctx))
681 bool by_ref = use_pointer_for_field (var, NULL);
682 x = build_receiver_ref (var, by_ref, ctx);
684 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
685 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
686 || ctx->loop_p
687 || code == OMP_CLAUSE_ALLOCATE
688 || (code == OMP_CLAUSE_PRIVATE
689 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
690 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
691 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
693 /* #pragma omp simd isn't a worksharing construct, and can reference
694 even private vars in its linear etc. clauses.
695 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
696 to private vars in all worksharing constructs. */
697 x = NULL_TREE;
698 if (outer && is_taskreg_ctx (outer))
699 x = lookup_decl (var, outer);
700 else if (outer)
701 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
702 if (x == NULL_TREE)
703 x = var;
705 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
707 gcc_assert (outer);
708 splay_tree_node n
709 = splay_tree_lookup (outer->field_map,
710 (splay_tree_key) &DECL_UID (var));
711 if (n == NULL)
713 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
714 x = var;
715 else
716 x = lookup_decl (var, outer);
718 else
720 tree field = (tree) n->value;
721 /* If the receiver record type was remapped in the child function,
722 remap the field into the new record type. */
723 x = maybe_lookup_field (field, outer);
724 if (x != NULL)
725 field = x;
727 x = build_simple_mem_ref (outer->receiver_decl);
728 x = omp_build_component_ref (x, field);
729 if (use_pointer_for_field (var, outer))
730 x = build_simple_mem_ref (x);
733 else if (outer)
734 x = lookup_decl (var, outer);
735 else if (omp_privatize_by_reference (var))
736 /* This can happen with orphaned constructs. If var is reference, it is
737 possible it is shared and as such valid. */
738 x = var;
739 else if (omp_member_access_dummy_var (var))
740 x = var;
741 else
742 gcc_unreachable ();
744 if (x == var)
746 tree t = omp_member_access_dummy_var (var);
747 if (t)
749 x = DECL_VALUE_EXPR (var);
750 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
751 if (o != t)
752 x = unshare_and_remap (x, t, o);
753 else
754 x = unshare_expr (x);
758 if (omp_privatize_by_reference (var))
759 x = build_simple_mem_ref (x);
761 return x;
764 /* Build tree nodes to access the field for VAR on the sender side. */
766 static tree
767 build_sender_ref (splay_tree_key key, omp_context *ctx)
769 tree field = lookup_sfield (key, ctx);
770 return omp_build_component_ref (ctx->sender_decl, field);
773 static tree
774 build_sender_ref (tree var, omp_context *ctx)
776 return build_sender_ref ((splay_tree_key) var, ctx);
779 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
780 BASE_POINTERS_RESTRICT, declare the field with restrict. */
782 static void
783 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
785 tree field, type, sfield = NULL_TREE;
786 splay_tree_key key = (splay_tree_key) var;
788 if ((mask & 16) != 0)
790 key = (splay_tree_key) &DECL_NAME (var);
791 gcc_checking_assert (key != (splay_tree_key) var);
793 if ((mask & 8) != 0)
795 key = (splay_tree_key) &DECL_UID (var);
796 gcc_checking_assert (key != (splay_tree_key) var);
798 gcc_assert ((mask & 1) == 0
799 || !splay_tree_lookup (ctx->field_map, key));
800 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
801 || !splay_tree_lookup (ctx->sfield_map, key));
802 gcc_assert ((mask & 3) == 3
803 || !is_gimple_omp_oacc (ctx->stmt));
805 type = TREE_TYPE (var);
806 if ((mask & 16) != 0)
807 type = lang_hooks.decls.omp_array_data (var, true);
809 /* Prevent redeclaring the var in the split-off function with a restrict
810 pointer type. Note that we only clear type itself, restrict qualifiers in
811 the pointed-to type will be ignored by points-to analysis. */
812 if (POINTER_TYPE_P (type)
813 && TYPE_RESTRICT (type))
814 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
816 if (mask & 4)
818 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
819 type = build_pointer_type (build_pointer_type (type));
821 else if (by_ref)
822 type = build_pointer_type (type);
823 else if ((mask & (32 | 3)) == 1
824 && omp_privatize_by_reference (var))
825 type = TREE_TYPE (type);
827 field = build_decl (DECL_SOURCE_LOCATION (var),
828 FIELD_DECL, DECL_NAME (var), type);
830 /* Remember what variable this field was created for. This does have a
831 side effect of making dwarf2out ignore this member, so for helpful
832 debugging we clear it later in delete_omp_context. */
833 DECL_ABSTRACT_ORIGIN (field) = var;
834 if ((mask & 16) == 0 && type == TREE_TYPE (var))
836 SET_DECL_ALIGN (field, DECL_ALIGN (var));
837 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
838 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
840 else
841 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
843 if ((mask & 3) == 3)
845 insert_field_into_struct (ctx->record_type, field);
846 if (ctx->srecord_type)
848 sfield = build_decl (DECL_SOURCE_LOCATION (var),
849 FIELD_DECL, DECL_NAME (var), type);
850 DECL_ABSTRACT_ORIGIN (sfield) = var;
851 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
852 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
853 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
854 insert_field_into_struct (ctx->srecord_type, sfield);
857 else
859 if (ctx->srecord_type == NULL_TREE)
861 tree t;
863 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
864 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
865 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
867 sfield = build_decl (DECL_SOURCE_LOCATION (t),
868 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
869 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
870 insert_field_into_struct (ctx->srecord_type, sfield);
871 splay_tree_insert (ctx->sfield_map,
872 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
873 (splay_tree_value) sfield);
876 sfield = field;
877 insert_field_into_struct ((mask & 1) ? ctx->record_type
878 : ctx->srecord_type, field);
881 if (mask & 1)
882 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
883 if ((mask & 2) && ctx->sfield_map)
884 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
887 static tree
888 install_var_local (tree var, omp_context *ctx)
890 tree new_var = omp_copy_decl_1 (var, ctx);
891 insert_decl_map (&ctx->cb, var, new_var);
892 return new_var;
895 /* Adjust the replacement for DECL in CTX for the new context. This means
896 copying the DECL_VALUE_EXPR, and fixing up the type. */
898 static void
899 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
901 tree new_decl, size;
903 new_decl = lookup_decl (decl, ctx);
905 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
907 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
908 && DECL_HAS_VALUE_EXPR_P (decl))
910 tree ve = DECL_VALUE_EXPR (decl);
911 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
912 SET_DECL_VALUE_EXPR (new_decl, ve);
913 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
916 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
918 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
919 if (size == error_mark_node)
920 size = TYPE_SIZE (TREE_TYPE (new_decl));
921 DECL_SIZE (new_decl) = size;
923 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
924 if (size == error_mark_node)
925 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
926 DECL_SIZE_UNIT (new_decl) = size;
930 /* The callback for remap_decl. Search all containing contexts for a
931 mapping of the variable; this avoids having to duplicate the splay
932 tree ahead of time. We know a mapping doesn't already exist in the
933 given context. Create new mappings to implement default semantics. */
935 static tree
936 omp_copy_decl (tree var, copy_body_data *cb)
938 omp_context *ctx = (omp_context *) cb;
939 tree new_var;
941 if (TREE_CODE (var) == LABEL_DECL)
943 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
944 return var;
945 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
946 DECL_CONTEXT (new_var) = current_function_decl;
947 insert_decl_map (&ctx->cb, var, new_var);
948 return new_var;
951 while (!is_taskreg_ctx (ctx))
953 ctx = ctx->outer;
954 if (ctx == NULL)
955 return var;
956 new_var = maybe_lookup_decl (var, ctx);
957 if (new_var)
958 return new_var;
961 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
962 return var;
964 return error_mark_node;
967 /* Create a new context, with OUTER_CTX being the surrounding context. */
969 static omp_context *
970 new_omp_context (gimple *stmt, omp_context *outer_ctx)
972 omp_context *ctx = XCNEW (omp_context);
974 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
975 (splay_tree_value) ctx);
976 ctx->stmt = stmt;
978 if (outer_ctx)
980 ctx->outer = outer_ctx;
981 ctx->cb = outer_ctx->cb;
982 ctx->cb.block = NULL;
983 ctx->depth = outer_ctx->depth + 1;
985 else
987 ctx->cb.src_fn = current_function_decl;
988 ctx->cb.dst_fn = current_function_decl;
989 ctx->cb.src_node = cgraph_node::get (current_function_decl);
990 gcc_checking_assert (ctx->cb.src_node);
991 ctx->cb.dst_node = ctx->cb.src_node;
992 ctx->cb.src_cfun = cfun;
993 ctx->cb.copy_decl = omp_copy_decl;
994 ctx->cb.eh_lp_nr = 0;
995 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
996 ctx->cb.adjust_array_error_bounds = true;
997 ctx->cb.dont_remap_vla_if_no_change = true;
998 ctx->depth = 1;
1001 ctx->cb.decl_map = new hash_map<tree, tree>;
1003 return ctx;
1006 static gimple_seq maybe_catch_exception (gimple_seq);
1008 /* Finalize task copyfn. */
1010 static void
1011 finalize_task_copyfn (gomp_task *task_stmt)
1013 struct function *child_cfun;
1014 tree child_fn;
1015 gimple_seq seq = NULL, new_seq;
1016 gbind *bind;
1018 child_fn = gimple_omp_task_copy_fn (task_stmt);
1019 if (child_fn == NULL_TREE)
1020 return;
1022 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
1023 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
1025 push_cfun (child_cfun);
1026 bind = gimplify_body (child_fn, false);
1027 gimple_seq_add_stmt (&seq, bind);
1028 new_seq = maybe_catch_exception (seq);
1029 if (new_seq != seq)
1031 bind = gimple_build_bind (NULL, new_seq, NULL);
1032 seq = NULL;
1033 gimple_seq_add_stmt (&seq, bind);
1035 gimple_set_body (child_fn, seq);
1036 pop_cfun ();
1038 /* Inform the callgraph about the new function. */
1039 cgraph_node *node = cgraph_node::get_create (child_fn);
1040 node->parallelized_function = 1;
1041 cgraph_node::add_new_function (child_fn, false);
1044 /* Destroy a omp_context data structures. Called through the splay tree
1045 value delete callback. */
1047 static void
1048 delete_omp_context (splay_tree_value value)
1050 omp_context *ctx = (omp_context *) value;
1052 delete ctx->cb.decl_map;
1054 if (ctx->field_map)
1055 splay_tree_delete (ctx->field_map);
1056 if (ctx->sfield_map)
1057 splay_tree_delete (ctx->sfield_map);
1059 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1060 it produces corrupt debug information. */
1061 if (ctx->record_type)
1063 tree t;
1064 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1065 DECL_ABSTRACT_ORIGIN (t) = NULL;
1067 if (ctx->srecord_type)
1069 tree t;
1070 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1071 DECL_ABSTRACT_ORIGIN (t) = NULL;
1074 if (ctx->task_reduction_map)
1076 ctx->task_reductions.release ();
1077 delete ctx->task_reduction_map;
1080 delete ctx->lastprivate_conditional_map;
1081 delete ctx->allocate_map;
1083 XDELETE (ctx);
1086 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1087 context. */
1089 static void
1090 fixup_child_record_type (omp_context *ctx)
1092 tree f, type = ctx->record_type;
1094 if (!ctx->receiver_decl)
1095 return;
1096 /* ??? It isn't sufficient to just call remap_type here, because
1097 variably_modified_type_p doesn't work the way we expect for
1098 record types. Testing each field for whether it needs remapping
1099 and creating a new record by hand works, however. */
1100 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1101 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1102 break;
1103 if (f)
1105 tree name, new_fields = NULL;
1107 type = lang_hooks.types.make_type (RECORD_TYPE);
1108 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1109 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1110 TYPE_DECL, name, type);
1111 TYPE_NAME (type) = name;
1113 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1115 tree new_f = copy_node (f);
1116 DECL_CONTEXT (new_f) = type;
1117 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1118 DECL_CHAIN (new_f) = new_fields;
1119 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1120 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1121 &ctx->cb, NULL);
1122 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1123 &ctx->cb, NULL);
1124 new_fields = new_f;
1126 /* Arrange to be able to look up the receiver field
1127 given the sender field. */
1128 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1129 (splay_tree_value) new_f);
1131 TYPE_FIELDS (type) = nreverse (new_fields);
1132 layout_type (type);
1135 /* In a target region we never modify any of the pointers in *.omp_data_i,
1136 so attempt to help the optimizers. */
1137 if (is_gimple_omp_offloaded (ctx->stmt))
1138 type = build_qualified_type (type, TYPE_QUAL_CONST);
1140 TREE_TYPE (ctx->receiver_decl)
1141 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1144 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1145 specified by CLAUSES. */
1147 static void
1148 scan_sharing_clauses (tree clauses, omp_context *ctx)
1150 tree c, decl;
1151 bool scan_array_reductions = false;
1153 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1154 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
1155 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1156 /* omp_default_mem_alloc is 1 */
1157 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1158 || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
1160 /* The allocate clauses that appear on a target construct or on
1161 constructs in a target region must specify an allocator expression
1162 unless a requires directive with the dynamic_allocators clause
1163 is present in the same compilation unit. */
1164 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1165 && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS) == 0)
1166 && omp_maybe_offloaded_ctx (ctx))
1167 error_at (OMP_CLAUSE_LOCATION (c), "%<allocate%> clause must"
1168 " specify an allocator here");
1169 if (ctx->allocate_map == NULL)
1170 ctx->allocate_map = new hash_map<tree, tree>;
1171 tree val = integer_zero_node;
1172 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1173 val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
1174 if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
1175 val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
1176 ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
1179 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1181 bool by_ref;
1183 switch (OMP_CLAUSE_CODE (c))
1185 case OMP_CLAUSE_PRIVATE:
1186 decl = OMP_CLAUSE_DECL (c);
1187 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1188 goto do_private;
1189 else if (!is_variable_sized (decl))
1190 install_var_local (decl, ctx);
1191 break;
1193 case OMP_CLAUSE_SHARED:
1194 decl = OMP_CLAUSE_DECL (c);
1195 if (ctx->allocate_map && ctx->allocate_map->get (decl))
1196 ctx->allocate_map->remove (decl);
1197 /* Ignore shared directives in teams construct inside of
1198 target construct. */
1199 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1200 && !is_host_teams_ctx (ctx))
1202 /* Global variables don't need to be copied,
1203 the receiver side will use them directly. */
1204 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1205 if (is_global_var (odecl))
1206 break;
1207 insert_decl_map (&ctx->cb, decl, odecl);
1208 break;
1210 gcc_assert (is_taskreg_ctx (ctx));
1211 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1212 || !is_variable_sized (decl));
1213 /* Global variables don't need to be copied,
1214 the receiver side will use them directly. */
1215 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1216 break;
1217 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1219 use_pointer_for_field (decl, ctx);
1220 break;
1222 by_ref = use_pointer_for_field (decl, NULL);
1223 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1224 || TREE_ADDRESSABLE (decl)
1225 || by_ref
1226 || omp_privatize_by_reference (decl))
1228 by_ref = use_pointer_for_field (decl, ctx);
1229 install_var_field (decl, by_ref, 3, ctx);
1230 install_var_local (decl, ctx);
1231 break;
1233 /* We don't need to copy const scalar vars back. */
1234 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1235 goto do_private;
1237 case OMP_CLAUSE_REDUCTION:
1238 /* Collect 'reduction' clauses on OpenACC compute construct. */
1239 if (is_gimple_omp_oacc (ctx->stmt)
1240 && is_gimple_omp_offloaded (ctx->stmt))
1242 /* No 'reduction' clauses on OpenACC 'kernels'. */
1243 gcc_checking_assert (!is_oacc_kernels (ctx));
1244 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1245 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
1247 ctx->local_reduction_clauses
1248 = tree_cons (NULL, c, ctx->local_reduction_clauses);
1250 /* FALLTHRU */
1252 case OMP_CLAUSE_IN_REDUCTION:
1253 decl = OMP_CLAUSE_DECL (c);
1254 if (ctx->allocate_map
1255 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1256 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
1257 || OMP_CLAUSE_REDUCTION_TASK (c)))
1258 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1259 || is_task_ctx (ctx)))
1261 /* For now. */
1262 if (ctx->allocate_map->get (decl))
1263 ctx->allocate_map->remove (decl);
1265 if (TREE_CODE (decl) == MEM_REF)
1267 tree t = TREE_OPERAND (decl, 0);
1268 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1269 t = TREE_OPERAND (t, 0);
1270 if (INDIRECT_REF_P (t)
1271 || TREE_CODE (t) == ADDR_EXPR)
1272 t = TREE_OPERAND (t, 0);
1273 if (is_omp_target (ctx->stmt))
1275 if (is_variable_sized (t))
1277 gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
1278 t = DECL_VALUE_EXPR (t);
1279 gcc_assert (INDIRECT_REF_P (t));
1280 t = TREE_OPERAND (t, 0);
1281 gcc_assert (DECL_P (t));
1283 tree at = t;
1284 if (ctx->outer)
1285 scan_omp_op (&at, ctx->outer);
1286 tree nt = omp_copy_decl_1 (at, ctx->outer);
1287 splay_tree_insert (ctx->field_map,
1288 (splay_tree_key) &DECL_CONTEXT (t),
1289 (splay_tree_value) nt);
1290 if (at != t)
1291 splay_tree_insert (ctx->field_map,
1292 (splay_tree_key) &DECL_CONTEXT (at),
1293 (splay_tree_value) nt);
1294 break;
1296 install_var_local (t, ctx);
1297 if (is_taskreg_ctx (ctx)
1298 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1299 || (is_task_ctx (ctx)
1300 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1301 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1302 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1303 == POINTER_TYPE)))))
1304 && !is_variable_sized (t)
1305 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1306 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1307 && !is_task_ctx (ctx))))
1309 by_ref = use_pointer_for_field (t, NULL);
1310 if (is_task_ctx (ctx)
1311 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1312 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1314 install_var_field (t, false, 1, ctx);
1315 install_var_field (t, by_ref, 2, ctx);
1317 else
1318 install_var_field (t, by_ref, 3, ctx);
1320 break;
1322 if (is_omp_target (ctx->stmt))
1324 tree at = decl;
1325 if (ctx->outer)
1326 scan_omp_op (&at, ctx->outer);
1327 tree nt = omp_copy_decl_1 (at, ctx->outer);
1328 splay_tree_insert (ctx->field_map,
1329 (splay_tree_key) &DECL_CONTEXT (decl),
1330 (splay_tree_value) nt);
1331 if (at != decl)
1332 splay_tree_insert (ctx->field_map,
1333 (splay_tree_key) &DECL_CONTEXT (at),
1334 (splay_tree_value) nt);
1335 break;
1337 if (is_task_ctx (ctx)
1338 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1339 && OMP_CLAUSE_REDUCTION_TASK (c)
1340 && is_parallel_ctx (ctx)))
1342 /* Global variables don't need to be copied,
1343 the receiver side will use them directly. */
1344 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1346 by_ref = use_pointer_for_field (decl, ctx);
1347 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1348 install_var_field (decl, by_ref, 3, ctx);
1350 install_var_local (decl, ctx);
1351 break;
1353 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1354 && OMP_CLAUSE_REDUCTION_TASK (c))
1356 install_var_local (decl, ctx);
1357 break;
1359 goto do_private;
1361 case OMP_CLAUSE_LASTPRIVATE:
1362 /* Let the corresponding firstprivate clause create
1363 the variable. */
1364 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1365 break;
1366 /* FALLTHRU */
1368 case OMP_CLAUSE_FIRSTPRIVATE:
1369 case OMP_CLAUSE_LINEAR:
1370 decl = OMP_CLAUSE_DECL (c);
1371 do_private:
1372 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1373 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
1374 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1375 && is_gimple_omp_offloaded (ctx->stmt))
1377 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1378 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR
1379 && lang_hooks.decls.omp_array_data (decl, true)))
1381 by_ref = !omp_privatize_by_reference (decl);
1382 install_var_field (decl, by_ref, 3, ctx);
1384 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1386 if (INDIRECT_REF_P (decl))
1387 decl = TREE_OPERAND (decl, 0);
1388 install_var_field (decl, true, 3, ctx);
1390 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1391 install_var_field (decl, true, 3, ctx);
1392 else
1393 install_var_field (decl, false, 3, ctx);
1395 if (is_variable_sized (decl))
1397 if (is_task_ctx (ctx))
1399 if (ctx->allocate_map
1400 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1402 /* For now. */
1403 if (ctx->allocate_map->get (decl))
1404 ctx->allocate_map->remove (decl);
1406 install_var_field (decl, false, 1, ctx);
1408 break;
1410 else if (is_taskreg_ctx (ctx))
1412 bool global
1413 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1414 by_ref = use_pointer_for_field (decl, NULL);
1416 if (is_task_ctx (ctx)
1417 && (global || by_ref || omp_privatize_by_reference (decl)))
1419 if (ctx->allocate_map
1420 && ctx->allocate_map->get (decl))
1421 install_var_field (decl, by_ref, 32 | 1, ctx);
1422 else
1423 install_var_field (decl, false, 1, ctx);
1424 if (!global)
1425 install_var_field (decl, by_ref, 2, ctx);
1427 else if (!global)
1428 install_var_field (decl, by_ref, 3, ctx);
1430 install_var_local (decl, ctx);
1431 /* For descr arrays on target: firstprivatize data + attach ptr. */
1432 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1433 && is_gimple_omp_offloaded (ctx->stmt)
1434 && !is_gimple_omp_oacc (ctx->stmt)
1435 && lang_hooks.decls.omp_array_data (decl, true))
1437 install_var_field (decl, false, 16 | 3, ctx);
1438 install_var_field (decl, true, 8 | 3, ctx);
1440 break;
1442 case OMP_CLAUSE_USE_DEVICE_PTR:
1443 case OMP_CLAUSE_USE_DEVICE_ADDR:
1444 decl = OMP_CLAUSE_DECL (c);
1446 /* Fortran array descriptors. */
1447 if (lang_hooks.decls.omp_array_data (decl, true))
1448 install_var_field (decl, false, 19, ctx);
1449 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1450 && !omp_privatize_by_reference (decl)
1451 && !omp_is_allocatable_or_ptr (decl))
1452 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1453 install_var_field (decl, true, 11, ctx);
1454 else
1455 install_var_field (decl, false, 11, ctx);
1456 if (DECL_SIZE (decl)
1457 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1459 tree decl2 = DECL_VALUE_EXPR (decl);
1460 gcc_assert (INDIRECT_REF_P (decl2));
1461 decl2 = TREE_OPERAND (decl2, 0);
1462 gcc_assert (DECL_P (decl2));
1463 install_var_local (decl2, ctx);
1465 install_var_local (decl, ctx);
1466 break;
1468 case OMP_CLAUSE_HAS_DEVICE_ADDR:
1469 decl = OMP_CLAUSE_DECL (c);
1470 while (INDIRECT_REF_P (decl)
1471 || TREE_CODE (decl) == ARRAY_REF)
1472 decl = TREE_OPERAND (decl, 0);
1473 goto do_private;
1475 case OMP_CLAUSE_IS_DEVICE_PTR:
1476 decl = OMP_CLAUSE_DECL (c);
1477 goto do_private;
1479 case OMP_CLAUSE__LOOPTEMP_:
1480 case OMP_CLAUSE__REDUCTEMP_:
1481 gcc_assert (is_taskreg_ctx (ctx));
1482 decl = OMP_CLAUSE_DECL (c);
1483 install_var_field (decl, false, 3, ctx);
1484 install_var_local (decl, ctx);
1485 break;
1487 case OMP_CLAUSE_COPYPRIVATE:
1488 case OMP_CLAUSE_COPYIN:
1489 decl = OMP_CLAUSE_DECL (c);
1490 by_ref = use_pointer_for_field (decl, NULL);
1491 install_var_field (decl, by_ref, 3, ctx);
1492 break;
1494 case OMP_CLAUSE_FINAL:
1495 case OMP_CLAUSE_IF:
1496 case OMP_CLAUSE_NUM_THREADS:
1497 case OMP_CLAUSE_NUM_TEAMS:
1498 case OMP_CLAUSE_THREAD_LIMIT:
1499 case OMP_CLAUSE_DEVICE:
1500 case OMP_CLAUSE_SCHEDULE:
1501 case OMP_CLAUSE_DIST_SCHEDULE:
1502 case OMP_CLAUSE_DEPEND:
1503 case OMP_CLAUSE_PRIORITY:
1504 case OMP_CLAUSE_GRAINSIZE:
1505 case OMP_CLAUSE_NUM_TASKS:
1506 case OMP_CLAUSE_NUM_GANGS:
1507 case OMP_CLAUSE_NUM_WORKERS:
1508 case OMP_CLAUSE_VECTOR_LENGTH:
1509 case OMP_CLAUSE_DETACH:
1510 case OMP_CLAUSE_FILTER:
1511 if (ctx->outer)
1512 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1513 break;
1515 case OMP_CLAUSE_TO:
1516 case OMP_CLAUSE_FROM:
1517 case OMP_CLAUSE_MAP:
1518 if (ctx->outer)
1519 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1520 decl = OMP_CLAUSE_DECL (c);
1521 /* If requested, make 'decl' addressable. */
1522 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1523 && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c))
1525 gcc_checking_assert (DECL_P (decl));
1527 bool decl_addressable = TREE_ADDRESSABLE (decl);
1528 if (!decl_addressable)
1530 if (!make_addressable_vars)
1531 make_addressable_vars = BITMAP_ALLOC (NULL);
1532 bitmap_set_bit (make_addressable_vars, DECL_UID (decl));
1533 TREE_ADDRESSABLE (decl) = 1;
1536 if (dump_enabled_p ())
1538 location_t loc = OMP_CLAUSE_LOCATION (c);
1539 const dump_user_location_t d_u_loc
1540 = dump_user_location_t::from_location_t (loc);
1541 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
1542 #if __GNUC__ >= 10
1543 # pragma GCC diagnostic push
1544 # pragma GCC diagnostic ignored "-Wformat"
1545 #endif
1546 if (!decl_addressable)
1547 dump_printf_loc (MSG_NOTE, d_u_loc,
1548 "variable %<%T%>"
1549 " made addressable\n",
1550 decl);
1551 else
1552 dump_printf_loc (MSG_NOTE, d_u_loc,
1553 "variable %<%T%>"
1554 " already made addressable\n",
1555 decl);
1556 #if __GNUC__ >= 10
1557 # pragma GCC diagnostic pop
1558 #endif
1561 /* Done. */
1562 OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c) = 0;
1564 /* Global variables with "omp declare target" attribute
1565 don't need to be copied, the receiver side will use them
1566 directly. However, global variables with "omp declare target link"
1567 attribute need to be copied. Or when ALWAYS modifier is used. */
1568 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1569 && DECL_P (decl)
1570 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1571 && (OMP_CLAUSE_MAP_KIND (c)
1572 != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
1573 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
1574 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
1575 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1576 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1577 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1578 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1579 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_TO
1580 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_FROM
1581 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_TOFROM
1582 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
1583 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1584 && varpool_node::get_create (decl)->offloadable
1585 && !lookup_attribute ("omp declare target link",
1586 DECL_ATTRIBUTES (decl)))
1587 break;
1588 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1589 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1591 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1592 not offloaded; there is nothing to map for those. */
1593 if (!is_gimple_omp_offloaded (ctx->stmt)
1594 && !POINTER_TYPE_P (TREE_TYPE (decl))
1595 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1596 break;
1598 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1599 && DECL_P (decl)
1600 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1601 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1602 && is_omp_target (ctx->stmt))
1604 /* If this is an offloaded region, an attach operation should
1605 only exist when the pointer variable is mapped in a prior
1606 clause.
1607 If we had an error, we may not have attempted to sort clauses
1608 properly, so avoid the test. */
1609 if (is_gimple_omp_offloaded (ctx->stmt)
1610 && !seen_error ())
1611 gcc_assert
1612 (maybe_lookup_decl (decl, ctx)
1613 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1614 && lookup_attribute ("omp declare target",
1615 DECL_ATTRIBUTES (decl))));
1617 /* By itself, attach/detach is generated as part of pointer
1618 variable mapping and should not create new variables in the
1619 offloaded region, however sender refs for it must be created
1620 for its address to be passed to the runtime. */
1621 tree field
1622 = build_decl (OMP_CLAUSE_LOCATION (c),
1623 FIELD_DECL, NULL_TREE, ptr_type_node);
1624 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1625 insert_field_into_struct (ctx->record_type, field);
1626 /* To not clash with a map of the pointer variable itself,
1627 attach/detach maps have their field looked up by the *clause*
1628 tree expression, not the decl. */
1629 gcc_assert (!splay_tree_lookup (ctx->field_map,
1630 (splay_tree_key) c));
1631 splay_tree_insert (ctx->field_map, (splay_tree_key) c,
1632 (splay_tree_value) field);
1633 break;
1635 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1636 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1637 || (OMP_CLAUSE_MAP_KIND (c)
1638 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1640 if (TREE_CODE (decl) == COMPONENT_REF
1641 || (INDIRECT_REF_P (decl)
1642 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1643 && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1644 == REFERENCE_TYPE)
1645 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1646 == POINTER_TYPE)))))
1647 break;
1648 if (DECL_SIZE (decl)
1649 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1651 tree decl2 = DECL_VALUE_EXPR (decl);
1652 gcc_assert (INDIRECT_REF_P (decl2));
1653 decl2 = TREE_OPERAND (decl2, 0);
1654 gcc_assert (DECL_P (decl2));
1655 install_var_local (decl2, ctx);
1657 install_var_local (decl, ctx);
1658 break;
1660 if (DECL_P (decl))
1662 if (DECL_SIZE (decl)
1663 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1665 tree decl2 = DECL_VALUE_EXPR (decl);
1666 gcc_assert (INDIRECT_REF_P (decl2));
1667 decl2 = TREE_OPERAND (decl2, 0);
1668 gcc_assert (DECL_P (decl2));
1669 install_var_field (decl2, true, 3, ctx);
1670 install_var_local (decl2, ctx);
1671 install_var_local (decl, ctx);
1673 else
1675 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1676 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1677 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1678 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1679 install_var_field (decl, true, 7, ctx);
1680 else
1681 install_var_field (decl, true, 3, ctx);
1682 if (is_gimple_omp_offloaded (ctx->stmt)
1683 && !(is_gimple_omp_oacc (ctx->stmt)
1684 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
1685 install_var_local (decl, ctx);
1688 else
1690 tree base = get_base_address (decl);
1691 tree nc = OMP_CLAUSE_CHAIN (c);
1692 if (DECL_P (base)
1693 && nc != NULL_TREE
1694 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1695 && OMP_CLAUSE_DECL (nc) == base
1696 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1697 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1699 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1700 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1702 else
1704 if (ctx->outer)
1706 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1707 decl = OMP_CLAUSE_DECL (c);
1709 gcc_assert (!splay_tree_lookup (ctx->field_map,
1710 (splay_tree_key) decl));
1711 tree field
1712 = build_decl (OMP_CLAUSE_LOCATION (c),
1713 FIELD_DECL, NULL_TREE, ptr_type_node);
1714 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1715 insert_field_into_struct (ctx->record_type, field);
1716 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1717 (splay_tree_value) field);
1720 break;
1722 case OMP_CLAUSE_ORDER:
1723 ctx->order_concurrent = true;
1724 break;
1726 case OMP_CLAUSE_BIND:
1727 ctx->loop_p = true;
1728 break;
1730 case OMP_CLAUSE_NOWAIT:
1731 case OMP_CLAUSE_ORDERED:
1732 case OMP_CLAUSE_COLLAPSE:
1733 case OMP_CLAUSE_UNTIED:
1734 case OMP_CLAUSE_MERGEABLE:
1735 case OMP_CLAUSE_PROC_BIND:
1736 case OMP_CLAUSE_SAFELEN:
1737 case OMP_CLAUSE_SIMDLEN:
1738 case OMP_CLAUSE_THREADS:
1739 case OMP_CLAUSE_SIMD:
1740 case OMP_CLAUSE_NOGROUP:
1741 case OMP_CLAUSE_DEFAULTMAP:
1742 case OMP_CLAUSE_ASYNC:
1743 case OMP_CLAUSE_WAIT:
1744 case OMP_CLAUSE_GANG:
1745 case OMP_CLAUSE_WORKER:
1746 case OMP_CLAUSE_VECTOR:
1747 case OMP_CLAUSE_INDEPENDENT:
1748 case OMP_CLAUSE_AUTO:
1749 case OMP_CLAUSE_SEQ:
1750 case OMP_CLAUSE_TILE:
1751 case OMP_CLAUSE__SIMT_:
1752 case OMP_CLAUSE_DEFAULT:
1753 case OMP_CLAUSE_NONTEMPORAL:
1754 case OMP_CLAUSE_IF_PRESENT:
1755 case OMP_CLAUSE_FINALIZE:
1756 case OMP_CLAUSE_TASK_REDUCTION:
1757 case OMP_CLAUSE_ALLOCATE:
1758 break;
1760 case OMP_CLAUSE_ALIGNED:
1761 decl = OMP_CLAUSE_DECL (c);
1762 if (is_global_var (decl)
1763 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1764 install_var_local (decl, ctx);
1765 break;
1767 case OMP_CLAUSE__CONDTEMP_:
1768 decl = OMP_CLAUSE_DECL (c);
1769 if (is_parallel_ctx (ctx))
1771 install_var_field (decl, false, 3, ctx);
1772 install_var_local (decl, ctx);
1774 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1775 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1776 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1777 install_var_local (decl, ctx);
1778 break;
1780 case OMP_CLAUSE__CACHE_:
1781 case OMP_CLAUSE_NOHOST:
1782 default:
1783 gcc_unreachable ();
1787 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1789 switch (OMP_CLAUSE_CODE (c))
1791 case OMP_CLAUSE_LASTPRIVATE:
1792 /* Let the corresponding firstprivate clause create
1793 the variable. */
1794 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1795 scan_array_reductions = true;
1796 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1797 break;
1798 /* FALLTHRU */
1800 case OMP_CLAUSE_FIRSTPRIVATE:
1801 case OMP_CLAUSE_PRIVATE:
1802 case OMP_CLAUSE_LINEAR:
1803 case OMP_CLAUSE_HAS_DEVICE_ADDR:
1804 case OMP_CLAUSE_IS_DEVICE_PTR:
1805 decl = OMP_CLAUSE_DECL (c);
1806 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1808 while (INDIRECT_REF_P (decl)
1809 || TREE_CODE (decl) == ARRAY_REF)
1810 decl = TREE_OPERAND (decl, 0);
1813 if (is_variable_sized (decl))
1815 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1816 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
1817 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1818 && is_gimple_omp_offloaded (ctx->stmt))
1820 tree decl2 = DECL_VALUE_EXPR (decl);
1821 gcc_assert (INDIRECT_REF_P (decl2));
1822 decl2 = TREE_OPERAND (decl2, 0);
1823 gcc_assert (DECL_P (decl2));
1824 install_var_local (decl2, ctx);
1825 fixup_remapped_decl (decl2, ctx, false);
1827 install_var_local (decl, ctx);
1829 fixup_remapped_decl (decl, ctx,
1830 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1831 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1832 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1833 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1834 scan_array_reductions = true;
1835 break;
1837 case OMP_CLAUSE_REDUCTION:
1838 case OMP_CLAUSE_IN_REDUCTION:
1839 decl = OMP_CLAUSE_DECL (c);
1840 if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
1842 if (is_variable_sized (decl))
1843 install_var_local (decl, ctx);
1844 fixup_remapped_decl (decl, ctx, false);
1846 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1847 scan_array_reductions = true;
1848 break;
1850 case OMP_CLAUSE_TASK_REDUCTION:
1851 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1852 scan_array_reductions = true;
1853 break;
1855 case OMP_CLAUSE_SHARED:
1856 /* Ignore shared directives in teams construct inside of
1857 target construct. */
1858 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1859 && !is_host_teams_ctx (ctx))
1860 break;
1861 decl = OMP_CLAUSE_DECL (c);
1862 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1863 break;
1864 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1866 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1867 ctx->outer)))
1868 break;
1869 bool by_ref = use_pointer_for_field (decl, ctx);
1870 install_var_field (decl, by_ref, 11, ctx);
1871 break;
1873 fixup_remapped_decl (decl, ctx, false);
1874 break;
1876 case OMP_CLAUSE_MAP:
1877 if (!is_gimple_omp_offloaded (ctx->stmt))
1878 break;
1879 decl = OMP_CLAUSE_DECL (c);
1880 if (DECL_P (decl)
1881 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1882 && (OMP_CLAUSE_MAP_KIND (c)
1883 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1884 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1885 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1886 && varpool_node::get_create (decl)->offloadable)
1887 break;
1888 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1889 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1890 && is_omp_target (ctx->stmt)
1891 && !is_gimple_omp_offloaded (ctx->stmt))
1892 break;
1893 if (DECL_P (decl))
1895 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1896 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1897 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1898 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1900 tree new_decl = lookup_decl (decl, ctx);
1901 TREE_TYPE (new_decl)
1902 = remap_type (TREE_TYPE (decl), &ctx->cb);
1904 else if (DECL_SIZE (decl)
1905 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1907 tree decl2 = DECL_VALUE_EXPR (decl);
1908 gcc_assert (INDIRECT_REF_P (decl2));
1909 decl2 = TREE_OPERAND (decl2, 0);
1910 gcc_assert (DECL_P (decl2));
1911 fixup_remapped_decl (decl2, ctx, false);
1912 fixup_remapped_decl (decl, ctx, true);
1914 else
1915 fixup_remapped_decl (decl, ctx, false);
1917 break;
1919 case OMP_CLAUSE_COPYPRIVATE:
1920 case OMP_CLAUSE_COPYIN:
1921 case OMP_CLAUSE_DEFAULT:
1922 case OMP_CLAUSE_IF:
1923 case OMP_CLAUSE_NUM_THREADS:
1924 case OMP_CLAUSE_NUM_TEAMS:
1925 case OMP_CLAUSE_THREAD_LIMIT:
1926 case OMP_CLAUSE_DEVICE:
1927 case OMP_CLAUSE_SCHEDULE:
1928 case OMP_CLAUSE_DIST_SCHEDULE:
1929 case OMP_CLAUSE_NOWAIT:
1930 case OMP_CLAUSE_ORDERED:
1931 case OMP_CLAUSE_COLLAPSE:
1932 case OMP_CLAUSE_UNTIED:
1933 case OMP_CLAUSE_FINAL:
1934 case OMP_CLAUSE_MERGEABLE:
1935 case OMP_CLAUSE_PROC_BIND:
1936 case OMP_CLAUSE_SAFELEN:
1937 case OMP_CLAUSE_SIMDLEN:
1938 case OMP_CLAUSE_ALIGNED:
1939 case OMP_CLAUSE_DEPEND:
1940 case OMP_CLAUSE_DETACH:
1941 case OMP_CLAUSE_ALLOCATE:
1942 case OMP_CLAUSE__LOOPTEMP_:
1943 case OMP_CLAUSE__REDUCTEMP_:
1944 case OMP_CLAUSE_TO:
1945 case OMP_CLAUSE_FROM:
1946 case OMP_CLAUSE_PRIORITY:
1947 case OMP_CLAUSE_GRAINSIZE:
1948 case OMP_CLAUSE_NUM_TASKS:
1949 case OMP_CLAUSE_THREADS:
1950 case OMP_CLAUSE_SIMD:
1951 case OMP_CLAUSE_NOGROUP:
1952 case OMP_CLAUSE_DEFAULTMAP:
1953 case OMP_CLAUSE_ORDER:
1954 case OMP_CLAUSE_BIND:
1955 case OMP_CLAUSE_USE_DEVICE_PTR:
1956 case OMP_CLAUSE_USE_DEVICE_ADDR:
1957 case OMP_CLAUSE_NONTEMPORAL:
1958 case OMP_CLAUSE_ASYNC:
1959 case OMP_CLAUSE_WAIT:
1960 case OMP_CLAUSE_NUM_GANGS:
1961 case OMP_CLAUSE_NUM_WORKERS:
1962 case OMP_CLAUSE_VECTOR_LENGTH:
1963 case OMP_CLAUSE_GANG:
1964 case OMP_CLAUSE_WORKER:
1965 case OMP_CLAUSE_VECTOR:
1966 case OMP_CLAUSE_INDEPENDENT:
1967 case OMP_CLAUSE_AUTO:
1968 case OMP_CLAUSE_SEQ:
1969 case OMP_CLAUSE_TILE:
1970 case OMP_CLAUSE__SIMT_:
1971 case OMP_CLAUSE_IF_PRESENT:
1972 case OMP_CLAUSE_FINALIZE:
1973 case OMP_CLAUSE_FILTER:
1974 case OMP_CLAUSE__CONDTEMP_:
1975 break;
1977 case OMP_CLAUSE__CACHE_:
1978 case OMP_CLAUSE_NOHOST:
1979 default:
1980 gcc_unreachable ();
1984 gcc_checking_assert (!scan_array_reductions
1985 || !is_gimple_omp_oacc (ctx->stmt));
1986 if (scan_array_reductions)
1988 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1989 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1990 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1991 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1992 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1994 omp_context *rctx = ctx;
1995 if (is_omp_target (ctx->stmt))
1996 rctx = ctx->outer;
1997 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
1998 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
2000 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
2001 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
2002 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
2003 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
2004 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
2005 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
2009 /* Create a new name for omp child function. Returns an identifier. */
2011 static tree
2012 create_omp_child_function_name (bool task_copy)
2014 return clone_function_name_numbered (current_function_decl,
2015 task_copy ? "_omp_cpyfn" : "_omp_fn");
2018 /* Return true if CTX may belong to offloaded code: either if current function
2019 is offloaded, or any enclosing context corresponds to a target region. */
2021 static bool
2022 omp_maybe_offloaded_ctx (omp_context *ctx)
2024 if (cgraph_node::get (current_function_decl)->offloadable)
2025 return true;
2026 for (; ctx; ctx = ctx->outer)
2027 if (is_gimple_omp_offloaded (ctx->stmt))
2028 return true;
2029 return false;
2032 /* Build a decl for the omp child function. It'll not contain a body
2033 yet, just the bare decl. */
2035 static void
2036 create_omp_child_function (omp_context *ctx, bool task_copy)
2038 tree decl, type, name, t;
2040 name = create_omp_child_function_name (task_copy);
2041 if (task_copy)
2042 type = build_function_type_list (void_type_node, ptr_type_node,
2043 ptr_type_node, NULL_TREE);
2044 else
2045 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2047 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
2049 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
2050 || !task_copy);
2051 if (!task_copy)
2052 ctx->cb.dst_fn = decl;
2053 else
2054 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
2056 TREE_STATIC (decl) = 1;
2057 TREE_USED (decl) = 1;
2058 DECL_ARTIFICIAL (decl) = 1;
2059 DECL_IGNORED_P (decl) = 0;
2060 TREE_PUBLIC (decl) = 0;
2061 DECL_UNINLINABLE (decl) = 1;
2062 DECL_EXTERNAL (decl) = 0;
2063 DECL_CONTEXT (decl) = NULL_TREE;
2064 DECL_INITIAL (decl) = make_node (BLOCK);
2065 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
2066 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
2067 /* Remove omp declare simd attribute from the new attributes. */
2068 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
2070 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
2071 a = a2;
2072 a = TREE_CHAIN (a);
2073 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
2074 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
2075 *p = TREE_CHAIN (*p);
2076 else
2078 tree chain = TREE_CHAIN (*p);
2079 *p = copy_node (*p);
2080 p = &TREE_CHAIN (*p);
2081 *p = chain;
2084 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
2085 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
2086 DECL_FUNCTION_SPECIFIC_TARGET (decl)
2087 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
2088 DECL_FUNCTION_VERSIONED (decl)
2089 = DECL_FUNCTION_VERSIONED (current_function_decl);
2091 if (omp_maybe_offloaded_ctx (ctx))
2093 cgraph_node::get_create (decl)->offloadable = 1;
2094 if (ENABLE_OFFLOADING)
2095 g->have_offload = true;
2098 if (cgraph_node::get_create (decl)->offloadable)
2100 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
2101 ? "omp target entrypoint"
2102 : "omp declare target");
2103 if (lookup_attribute ("omp declare target",
2104 DECL_ATTRIBUTES (current_function_decl)))
2106 if (is_gimple_omp_offloaded (ctx->stmt))
2107 DECL_ATTRIBUTES (decl)
2108 = remove_attribute ("omp declare target",
2109 copy_list (DECL_ATTRIBUTES (decl)));
2110 else
2111 target_attr = NULL;
2113 if (target_attr
2114 && is_gimple_omp_offloaded (ctx->stmt)
2115 && lookup_attribute ("noclone", DECL_ATTRIBUTES (decl)) == NULL_TREE)
2116 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("noclone"),
2117 NULL_TREE, DECL_ATTRIBUTES (decl));
2118 if (target_attr)
2119 DECL_ATTRIBUTES (decl)
2120 = tree_cons (get_identifier (target_attr),
2121 NULL_TREE, DECL_ATTRIBUTES (decl));
2124 t = build_decl (DECL_SOURCE_LOCATION (decl),
2125 RESULT_DECL, NULL_TREE, void_type_node);
2126 DECL_ARTIFICIAL (t) = 1;
2127 DECL_IGNORED_P (t) = 1;
2128 DECL_CONTEXT (t) = decl;
2129 DECL_RESULT (decl) = t;
2131 tree data_name = get_identifier (".omp_data_i");
2132 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
2133 ptr_type_node);
2134 DECL_ARTIFICIAL (t) = 1;
2135 DECL_NAMELESS (t) = 1;
2136 DECL_ARG_TYPE (t) = ptr_type_node;
2137 DECL_CONTEXT (t) = current_function_decl;
2138 TREE_USED (t) = 1;
2139 TREE_READONLY (t) = 1;
2140 DECL_ARGUMENTS (decl) = t;
2141 if (!task_copy)
2142 ctx->receiver_decl = t;
2143 else
2145 t = build_decl (DECL_SOURCE_LOCATION (decl),
2146 PARM_DECL, get_identifier (".omp_data_o"),
2147 ptr_type_node);
2148 DECL_ARTIFICIAL (t) = 1;
2149 DECL_NAMELESS (t) = 1;
2150 DECL_ARG_TYPE (t) = ptr_type_node;
2151 DECL_CONTEXT (t) = current_function_decl;
2152 TREE_USED (t) = 1;
2153 TREE_ADDRESSABLE (t) = 1;
2154 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2155 DECL_ARGUMENTS (decl) = t;
2158 /* Allocate memory for the function structure. The call to
2159 allocate_struct_function clobbers CFUN, so we need to restore
2160 it afterward. */
2161 push_struct_function (decl);
2162 cfun->function_end_locus = gimple_location (ctx->stmt);
2163 init_tree_ssa (cfun);
2164 pop_cfun ();
2167 /* Callback for walk_gimple_seq. Check if combined parallel
2168 contains gimple_omp_for_combined_into_p OMP_FOR. */
2170 tree
2171 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2172 bool *handled_ops_p,
2173 struct walk_stmt_info *wi)
2175 gimple *stmt = gsi_stmt (*gsi_p);
2177 *handled_ops_p = true;
2178 switch (gimple_code (stmt))
2180 WALK_SUBSTMTS;
2182 case GIMPLE_OMP_FOR:
2183 if (gimple_omp_for_combined_into_p (stmt)
2184 && gimple_omp_for_kind (stmt)
2185 == *(const enum gf_mask *) (wi->info))
2187 wi->info = stmt;
2188 return integer_zero_node;
2190 break;
2191 default:
2192 break;
2194 return NULL;
2197 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2199 static void
2200 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2201 omp_context *outer_ctx)
2203 struct walk_stmt_info wi;
2205 memset (&wi, 0, sizeof (wi));
2206 wi.val_only = true;
2207 wi.info = (void *) &msk;
2208 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2209 if (wi.info != (void *) &msk)
2211 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2212 struct omp_for_data fd;
2213 omp_extract_for_data (for_stmt, &fd, NULL);
2214 /* We need two temporaries with fd.loop.v type (istart/iend)
2215 and then (fd.collapse - 1) temporaries with the same
2216 type for count2 ... countN-1 vars if not constant. */
2217 size_t count = 2, i;
2218 tree type = fd.iter_type;
2219 if (fd.collapse > 1
2220 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2222 count += fd.collapse - 1;
2223 /* If there are lastprivate clauses on the inner
2224 GIMPLE_OMP_FOR, add one more temporaries for the total number
2225 of iterations (product of count1 ... countN-1). */
2226 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2227 OMP_CLAUSE_LASTPRIVATE)
2228 || (msk == GF_OMP_FOR_KIND_FOR
2229 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2230 OMP_CLAUSE_LASTPRIVATE)))
2232 tree temp = create_tmp_var (type);
2233 tree c = build_omp_clause (UNKNOWN_LOCATION,
2234 OMP_CLAUSE__LOOPTEMP_);
2235 insert_decl_map (&outer_ctx->cb, temp, temp);
2236 OMP_CLAUSE_DECL (c) = temp;
2237 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2238 gimple_omp_taskreg_set_clauses (stmt, c);
2240 if (fd.non_rect
2241 && fd.last_nonrect == fd.first_nonrect + 1)
2242 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2243 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2245 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2246 tree type2 = TREE_TYPE (v);
2247 count++;
2248 for (i = 0; i < 3; i++)
2250 tree temp = create_tmp_var (type2);
2251 tree c = build_omp_clause (UNKNOWN_LOCATION,
2252 OMP_CLAUSE__LOOPTEMP_);
2253 insert_decl_map (&outer_ctx->cb, temp, temp);
2254 OMP_CLAUSE_DECL (c) = temp;
2255 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2256 gimple_omp_taskreg_set_clauses (stmt, c);
2260 for (i = 0; i < count; i++)
2262 tree temp = create_tmp_var (type);
2263 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2264 insert_decl_map (&outer_ctx->cb, temp, temp);
2265 OMP_CLAUSE_DECL (c) = temp;
2266 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2267 gimple_omp_taskreg_set_clauses (stmt, c);
2270 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2271 && omp_find_clause (gimple_omp_task_clauses (stmt),
2272 OMP_CLAUSE_REDUCTION))
2274 tree type = build_pointer_type (pointer_sized_int_node);
2275 tree temp = create_tmp_var (type);
2276 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2277 insert_decl_map (&outer_ctx->cb, temp, temp);
2278 OMP_CLAUSE_DECL (c) = temp;
2279 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2280 gimple_omp_task_set_clauses (stmt, c);
2284 /* Scan an OpenMP parallel directive. */
2286 static void
2287 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2289 omp_context *ctx;
2290 tree name;
2291 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2293 /* Ignore parallel directives with empty bodies, unless there
2294 are copyin clauses. */
2295 if (optimize > 0
2296 && empty_body_p (gimple_omp_body (stmt))
2297 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2298 OMP_CLAUSE_COPYIN) == NULL)
2300 gsi_replace (gsi, gimple_build_nop (), false);
2301 return;
2304 if (gimple_omp_parallel_combined_p (stmt))
2305 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
2306 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2307 OMP_CLAUSE_REDUCTION);
2308 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2309 if (OMP_CLAUSE_REDUCTION_TASK (c))
2311 tree type = build_pointer_type (pointer_sized_int_node);
2312 tree temp = create_tmp_var (type);
2313 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2314 if (outer_ctx)
2315 insert_decl_map (&outer_ctx->cb, temp, temp);
2316 OMP_CLAUSE_DECL (c) = temp;
2317 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2318 gimple_omp_parallel_set_clauses (stmt, c);
2319 break;
2321 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2322 break;
2324 ctx = new_omp_context (stmt, outer_ctx);
2325 taskreg_contexts.safe_push (ctx);
2326 if (taskreg_nesting_level > 1)
2327 ctx->is_nested = true;
2328 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2329 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2330 name = create_tmp_var_name (".omp_data_s");
2331 name = build_decl (gimple_location (stmt),
2332 TYPE_DECL, name, ctx->record_type);
2333 DECL_ARTIFICIAL (name) = 1;
2334 DECL_NAMELESS (name) = 1;
2335 TYPE_NAME (ctx->record_type) = name;
2336 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2337 create_omp_child_function (ctx, false);
2338 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2340 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2341 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2343 if (TYPE_FIELDS (ctx->record_type) == NULL)
2344 ctx->record_type = ctx->receiver_decl = NULL;
2347 /* Scan an OpenMP task directive. */
2349 static void
2350 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2352 omp_context *ctx;
2353 tree name, t;
2354 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2356 /* Ignore task directives with empty bodies, unless they have depend
2357 clause. */
2358 if (optimize > 0
2359 && gimple_omp_body (stmt)
2360 && empty_body_p (gimple_omp_body (stmt))
2361 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2363 gsi_replace (gsi, gimple_build_nop (), false);
2364 return;
2367 if (gimple_omp_task_taskloop_p (stmt))
2368 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2370 ctx = new_omp_context (stmt, outer_ctx);
2372 if (gimple_omp_task_taskwait_p (stmt))
2374 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2375 return;
2378 taskreg_contexts.safe_push (ctx);
2379 if (taskreg_nesting_level > 1)
2380 ctx->is_nested = true;
2381 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2382 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2383 name = create_tmp_var_name (".omp_data_s");
2384 name = build_decl (gimple_location (stmt),
2385 TYPE_DECL, name, ctx->record_type);
2386 DECL_ARTIFICIAL (name) = 1;
2387 DECL_NAMELESS (name) = 1;
2388 TYPE_NAME (ctx->record_type) = name;
2389 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2390 create_omp_child_function (ctx, false);
2391 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2393 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2395 if (ctx->srecord_type)
2397 name = create_tmp_var_name (".omp_data_a");
2398 name = build_decl (gimple_location (stmt),
2399 TYPE_DECL, name, ctx->srecord_type);
2400 DECL_ARTIFICIAL (name) = 1;
2401 DECL_NAMELESS (name) = 1;
2402 TYPE_NAME (ctx->srecord_type) = name;
2403 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2404 create_omp_child_function (ctx, true);
2407 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2409 if (TYPE_FIELDS (ctx->record_type) == NULL)
2411 ctx->record_type = ctx->receiver_decl = NULL;
2412 t = build_int_cst (long_integer_type_node, 0);
2413 gimple_omp_task_set_arg_size (stmt, t);
2414 t = build_int_cst (long_integer_type_node, 1);
2415 gimple_omp_task_set_arg_align (stmt, t);
2419 /* Helper function for finish_taskreg_scan, called through walk_tree.
2420 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2421 tree, replace it in the expression. */
2423 static tree
2424 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2426 if (VAR_P (*tp))
2428 omp_context *ctx = (omp_context *) data;
2429 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2430 if (t != *tp)
2432 if (DECL_HAS_VALUE_EXPR_P (t))
2433 t = unshare_expr (DECL_VALUE_EXPR (t));
2434 *tp = t;
2436 *walk_subtrees = 0;
2438 else if (IS_TYPE_OR_DECL_P (*tp))
2439 *walk_subtrees = 0;
2440 return NULL_TREE;
2443 /* If any decls have been made addressable during scan_omp,
2444 adjust their fields if needed, and layout record types
2445 of parallel/task constructs. */
2447 static void
2448 finish_taskreg_scan (omp_context *ctx)
2450 if (ctx->record_type == NULL_TREE)
2451 return;
2453 /* If any make_addressable_vars were needed, verify all
2454 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2455 statements if use_pointer_for_field hasn't changed
2456 because of that. If it did, update field types now. */
2457 if (make_addressable_vars)
2459 tree c;
2461 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2462 c; c = OMP_CLAUSE_CHAIN (c))
2463 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2464 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2466 tree decl = OMP_CLAUSE_DECL (c);
2468 /* Global variables don't need to be copied,
2469 the receiver side will use them directly. */
2470 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2471 continue;
2472 if (!bitmap_bit_p (make_addressable_vars, DECL_UID (decl))
2473 || !use_pointer_for_field (decl, ctx))
2474 continue;
2475 tree field = lookup_field (decl, ctx);
2476 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2477 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2478 continue;
2479 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2480 TREE_THIS_VOLATILE (field) = 0;
2481 DECL_USER_ALIGN (field) = 0;
2482 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2483 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2484 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2485 if (ctx->srecord_type)
2487 tree sfield = lookup_sfield (decl, ctx);
2488 TREE_TYPE (sfield) = TREE_TYPE (field);
2489 TREE_THIS_VOLATILE (sfield) = 0;
2490 DECL_USER_ALIGN (sfield) = 0;
2491 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2492 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2493 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2498 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2500 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2501 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2502 if (c)
2504 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2505 expects to find it at the start of data. */
2506 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2507 tree *p = &TYPE_FIELDS (ctx->record_type);
2508 while (*p)
2509 if (*p == f)
2511 *p = DECL_CHAIN (*p);
2512 break;
2514 else
2515 p = &DECL_CHAIN (*p);
2516 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2517 TYPE_FIELDS (ctx->record_type) = f;
2519 layout_type (ctx->record_type);
2520 fixup_child_record_type (ctx);
2522 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2524 layout_type (ctx->record_type);
2525 fixup_child_record_type (ctx);
2527 else
2529 location_t loc = gimple_location (ctx->stmt);
2530 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2531 tree detach_clause
2532 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2533 OMP_CLAUSE_DETACH);
2534 /* Move VLA fields to the end. */
2535 p = &TYPE_FIELDS (ctx->record_type);
2536 while (*p)
2537 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2538 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2540 *q = *p;
2541 *p = TREE_CHAIN (*p);
2542 TREE_CHAIN (*q) = NULL_TREE;
2543 q = &TREE_CHAIN (*q);
2545 else
2546 p = &DECL_CHAIN (*p);
2547 *p = vla_fields;
2548 if (gimple_omp_task_taskloop_p (ctx->stmt))
2550 /* Move fields corresponding to first and second _looptemp_
2551 clause first. There are filled by GOMP_taskloop
2552 and thus need to be in specific positions. */
2553 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2554 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2555 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2556 OMP_CLAUSE__LOOPTEMP_);
2557 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2558 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2559 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2560 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2561 p = &TYPE_FIELDS (ctx->record_type);
2562 while (*p)
2563 if (*p == f1 || *p == f2 || *p == f3)
2564 *p = DECL_CHAIN (*p);
2565 else
2566 p = &DECL_CHAIN (*p);
2567 DECL_CHAIN (f1) = f2;
2568 if (c3)
2570 DECL_CHAIN (f2) = f3;
2571 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2573 else
2574 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2575 TYPE_FIELDS (ctx->record_type) = f1;
2576 if (ctx->srecord_type)
2578 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2579 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2580 if (c3)
2581 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2582 p = &TYPE_FIELDS (ctx->srecord_type);
2583 while (*p)
2584 if (*p == f1 || *p == f2 || *p == f3)
2585 *p = DECL_CHAIN (*p);
2586 else
2587 p = &DECL_CHAIN (*p);
2588 DECL_CHAIN (f1) = f2;
2589 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2590 if (c3)
2592 DECL_CHAIN (f2) = f3;
2593 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2595 else
2596 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2597 TYPE_FIELDS (ctx->srecord_type) = f1;
2600 if (detach_clause)
2602 tree c, field;
2604 /* Look for a firstprivate clause with the detach event handle. */
2605 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2606 c; c = OMP_CLAUSE_CHAIN (c))
2608 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2609 continue;
2610 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2611 == OMP_CLAUSE_DECL (detach_clause))
2612 break;
2615 gcc_assert (c);
2616 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2618 /* Move field corresponding to the detach clause first.
2619 This is filled by GOMP_task and needs to be in a
2620 specific position. */
2621 p = &TYPE_FIELDS (ctx->record_type);
2622 while (*p)
2623 if (*p == field)
2624 *p = DECL_CHAIN (*p);
2625 else
2626 p = &DECL_CHAIN (*p);
2627 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2628 TYPE_FIELDS (ctx->record_type) = field;
2629 if (ctx->srecord_type)
2631 field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2632 p = &TYPE_FIELDS (ctx->srecord_type);
2633 while (*p)
2634 if (*p == field)
2635 *p = DECL_CHAIN (*p);
2636 else
2637 p = &DECL_CHAIN (*p);
2638 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2639 TYPE_FIELDS (ctx->srecord_type) = field;
2642 layout_type (ctx->record_type);
2643 fixup_child_record_type (ctx);
2644 if (ctx->srecord_type)
2645 layout_type (ctx->srecord_type);
2646 tree t = fold_convert_loc (loc, long_integer_type_node,
2647 TYPE_SIZE_UNIT (ctx->record_type));
2648 if (TREE_CODE (t) != INTEGER_CST)
2650 t = unshare_expr (t);
2651 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2653 gimple_omp_task_set_arg_size (ctx->stmt, t);
2654 t = build_int_cst (long_integer_type_node,
2655 TYPE_ALIGN_UNIT (ctx->record_type));
2656 gimple_omp_task_set_arg_align (ctx->stmt, t);
2660 /* Find the enclosing offload context. */
2662 static omp_context *
2663 enclosing_target_ctx (omp_context *ctx)
2665 for (; ctx; ctx = ctx->outer)
2666 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2667 break;
2669 return ctx;
2672 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2673 construct.
2674 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2676 static bool
2677 ctx_in_oacc_kernels_region (omp_context *ctx)
2679 for (;ctx != NULL; ctx = ctx->outer)
2681 gimple *stmt = ctx->stmt;
2682 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2683 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2684 return true;
2687 return false;
2690 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2691 (This doesn't include OpenACC 'kernels' decomposed parts.)
2692 Until kernels handling moves to use the same loop indirection
2693 scheme as parallel, we need to do this checking early. */
2695 static unsigned
2696 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2698 bool checking = true;
2699 unsigned outer_mask = 0;
2700 unsigned this_mask = 0;
2701 bool has_seq = false, has_auto = false;
2703 if (ctx->outer)
2704 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2705 if (!stmt)
2707 checking = false;
2708 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2709 return outer_mask;
2710 stmt = as_a <gomp_for *> (ctx->stmt);
2713 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2715 switch (OMP_CLAUSE_CODE (c))
2717 case OMP_CLAUSE_GANG:
2718 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2719 break;
2720 case OMP_CLAUSE_WORKER:
2721 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2722 break;
2723 case OMP_CLAUSE_VECTOR:
2724 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2725 break;
2726 case OMP_CLAUSE_SEQ:
2727 has_seq = true;
2728 break;
2729 case OMP_CLAUSE_AUTO:
2730 has_auto = true;
2731 break;
2732 default:
2733 break;
2737 if (checking)
2739 if (has_seq && (this_mask || has_auto))
2740 error_at (gimple_location (stmt), "%<seq%> overrides other"
2741 " OpenACC loop specifiers");
2742 else if (has_auto && this_mask)
2743 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2744 " OpenACC loop specifiers");
2746 if (this_mask & outer_mask)
2747 error_at (gimple_location (stmt), "inner loop uses same"
2748 " OpenACC parallelism as containing loop");
2751 return outer_mask | this_mask;
2754 /* Scan a GIMPLE_OMP_FOR. */
2756 static omp_context *
2757 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2759 omp_context *ctx;
2760 size_t i;
2761 tree clauses = gimple_omp_for_clauses (stmt);
2763 ctx = new_omp_context (stmt, outer_ctx);
2765 if (is_gimple_omp_oacc (stmt))
2767 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2769 if (!(tgt && is_oacc_kernels (tgt)))
2770 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2772 tree c_op0;
2773 switch (OMP_CLAUSE_CODE (c))
2775 case OMP_CLAUSE_GANG:
2776 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2777 break;
2779 case OMP_CLAUSE_WORKER:
2780 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2781 break;
2783 case OMP_CLAUSE_VECTOR:
2784 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2785 break;
2787 default:
2788 continue;
2791 if (c_op0)
2793 /* By construction, this is impossible for OpenACC 'kernels'
2794 decomposed parts. */
2795 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2797 error_at (OMP_CLAUSE_LOCATION (c),
2798 "argument not permitted on %qs clause",
2799 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2800 if (tgt)
2801 inform (gimple_location (tgt->stmt),
2802 "enclosing parent compute construct");
2803 else if (oacc_get_fn_attrib (current_function_decl))
2804 inform (DECL_SOURCE_LOCATION (current_function_decl),
2805 "enclosing routine");
2806 else
2807 gcc_unreachable ();
2811 if (tgt && is_oacc_kernels (tgt))
2812 check_oacc_kernel_gwv (stmt, ctx);
2814 /* Collect all variables named in reductions on this loop. Ensure
2815 that, if this loop has a reduction on some variable v, and there is
2816 a reduction on v somewhere in an outer context, then there is a
2817 reduction on v on all intervening loops as well. */
2818 tree local_reduction_clauses = NULL;
2819 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2821 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2822 local_reduction_clauses
2823 = tree_cons (NULL, c, local_reduction_clauses);
2825 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2826 ctx->outer_reduction_clauses
2827 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2828 ctx->outer->outer_reduction_clauses);
2829 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2830 tree local_iter = local_reduction_clauses;
2831 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2833 tree local_clause = TREE_VALUE (local_iter);
2834 tree local_var = OMP_CLAUSE_DECL (local_clause);
2835 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2836 bool have_outer_reduction = false;
2837 tree ctx_iter = outer_reduction_clauses;
2838 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2840 tree outer_clause = TREE_VALUE (ctx_iter);
2841 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2842 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2843 if (outer_var == local_var && outer_op != local_op)
2845 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2846 "conflicting reduction operations for %qE",
2847 local_var);
2848 inform (OMP_CLAUSE_LOCATION (outer_clause),
2849 "location of the previous reduction for %qE",
2850 outer_var);
2852 if (outer_var == local_var)
2854 have_outer_reduction = true;
2855 break;
2858 if (have_outer_reduction)
2860 /* There is a reduction on outer_var both on this loop and on
2861 some enclosing loop. Walk up the context tree until such a
2862 loop with a reduction on outer_var is found, and complain
2863 about all intervening loops that do not have such a
2864 reduction. */
2865 struct omp_context *curr_loop = ctx->outer;
2866 bool found = false;
2867 while (curr_loop != NULL)
2869 tree curr_iter = curr_loop->local_reduction_clauses;
2870 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2872 tree curr_clause = TREE_VALUE (curr_iter);
2873 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2874 if (curr_var == local_var)
2876 found = true;
2877 break;
2880 if (!found)
2881 warning_at (gimple_location (curr_loop->stmt), 0,
2882 "nested loop in reduction needs "
2883 "reduction clause for %qE",
2884 local_var);
2885 else
2886 break;
2887 curr_loop = curr_loop->outer;
2891 ctx->local_reduction_clauses = local_reduction_clauses;
2892 ctx->outer_reduction_clauses
2893 = chainon (unshare_expr (ctx->local_reduction_clauses),
2894 ctx->outer_reduction_clauses);
2896 if (tgt && is_oacc_kernels (tgt))
2898 /* Strip out reductions, as they are not handled yet. */
2899 tree *prev_ptr = &clauses;
2901 while (tree probe = *prev_ptr)
2903 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2905 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2906 *prev_ptr = *next_ptr;
2907 else
2908 prev_ptr = next_ptr;
2911 gimple_omp_for_set_clauses (stmt, clauses);
2915 scan_sharing_clauses (clauses, ctx);
2917 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2918 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2920 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2921 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2922 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2923 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2925 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2926 return ctx;
2929 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2931 static void
2932 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2933 omp_context *outer_ctx)
2935 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2936 gsi_replace (gsi, bind, false);
2937 gimple_seq seq = NULL;
2938 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2939 tree cond = create_tmp_var_raw (integer_type_node);
2940 DECL_CONTEXT (cond) = current_function_decl;
2941 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2942 gimple_bind_set_vars (bind, cond);
2943 gimple_call_set_lhs (g, cond);
2944 gimple_seq_add_stmt (&seq, g);
2945 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2946 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2947 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2948 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2949 gimple_seq_add_stmt (&seq, g);
2950 g = gimple_build_label (lab1);
2951 gimple_seq_add_stmt (&seq, g);
2952 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2953 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2954 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2955 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2956 gimple_omp_for_set_clauses (new_stmt, clause);
2957 gimple_seq_add_stmt (&seq, new_stmt);
2958 g = gimple_build_goto (lab3);
2959 gimple_seq_add_stmt (&seq, g);
2960 g = gimple_build_label (lab2);
2961 gimple_seq_add_stmt (&seq, g);
2962 gimple_seq_add_stmt (&seq, stmt);
2963 g = gimple_build_label (lab3);
2964 gimple_seq_add_stmt (&seq, g);
2965 gimple_bind_set_body (bind, seq);
2966 update_stmt (bind);
2967 scan_omp_for (new_stmt, outer_ctx);
2968 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2971 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2972 struct walk_stmt_info *);
2973 static omp_context *maybe_lookup_ctx (gimple *);
2975 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2976 for scan phase loop. */
2978 static void
2979 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2980 omp_context *outer_ctx)
2982 /* The only change between inclusive and exclusive scan will be
2983 within the first simd loop, so just use inclusive in the
2984 worksharing loop. */
2985 outer_ctx->scan_inclusive = true;
2986 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2987 OMP_CLAUSE_DECL (c) = integer_zero_node;
2989 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2990 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2991 gsi_replace (gsi, input_stmt, false);
2992 gimple_seq input_body = NULL;
2993 gimple_seq_add_stmt (&input_body, stmt);
2994 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2996 gimple_stmt_iterator input1_gsi = gsi_none ();
2997 struct walk_stmt_info wi;
2998 memset (&wi, 0, sizeof (wi));
2999 wi.val_only = true;
3000 wi.info = (void *) &input1_gsi;
3001 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
3002 gcc_assert (!gsi_end_p (input1_gsi));
3004 gimple *input_stmt1 = gsi_stmt (input1_gsi);
3005 gsi_next (&input1_gsi);
3006 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
3007 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
3008 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
3009 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
3010 std::swap (input_stmt1, scan_stmt1);
3012 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
3013 gimple_omp_set_body (input_stmt1, NULL);
3015 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
3016 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
3018 gimple_omp_set_body (input_stmt1, input_body1);
3019 gimple_omp_set_body (scan_stmt1, NULL);
3021 gimple_stmt_iterator input2_gsi = gsi_none ();
3022 memset (&wi, 0, sizeof (wi));
3023 wi.val_only = true;
3024 wi.info = (void *) &input2_gsi;
3025 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
3026 NULL, &wi);
3027 gcc_assert (!gsi_end_p (input2_gsi));
3029 gimple *input_stmt2 = gsi_stmt (input2_gsi);
3030 gsi_next (&input2_gsi);
3031 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
3032 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
3033 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
3034 std::swap (input_stmt2, scan_stmt2);
3036 gimple_omp_set_body (input_stmt2, NULL);
3038 gimple_omp_set_body (input_stmt, input_body);
3039 gimple_omp_set_body (scan_stmt, scan_body);
3041 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
3042 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
3044 ctx = new_omp_context (scan_stmt, outer_ctx);
3045 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
3047 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
3050 /* Scan an OpenMP sections directive. */
3052 static void
3053 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
3055 omp_context *ctx;
3057 ctx = new_omp_context (stmt, outer_ctx);
3058 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
3059 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3062 /* Scan an OpenMP single directive. */
3064 static void
3065 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
3067 omp_context *ctx;
3068 tree name;
3070 ctx = new_omp_context (stmt, outer_ctx);
3071 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3072 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3073 name = create_tmp_var_name (".omp_copy_s");
3074 name = build_decl (gimple_location (stmt),
3075 TYPE_DECL, name, ctx->record_type);
3076 TYPE_NAME (ctx->record_type) = name;
3078 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
3079 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3081 if (TYPE_FIELDS (ctx->record_type) == NULL)
3082 ctx->record_type = NULL;
3083 else
3084 layout_type (ctx->record_type);
3087 /* Scan a GIMPLE_OMP_TARGET. */
3089 static void
3090 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
3092 omp_context *ctx;
3093 tree name;
3094 bool offloaded = is_gimple_omp_offloaded (stmt);
3095 tree clauses = gimple_omp_target_clauses (stmt);
3097 ctx = new_omp_context (stmt, outer_ctx);
3098 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3099 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3100 name = create_tmp_var_name (".omp_data_t");
3101 name = build_decl (gimple_location (stmt),
3102 TYPE_DECL, name, ctx->record_type);
3103 DECL_ARTIFICIAL (name) = 1;
3104 DECL_NAMELESS (name) = 1;
3105 TYPE_NAME (ctx->record_type) = name;
3106 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3108 if (offloaded)
3110 create_omp_child_function (ctx, false);
3111 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
3114 scan_sharing_clauses (clauses, ctx);
3115 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3117 if (TYPE_FIELDS (ctx->record_type) == NULL)
3118 ctx->record_type = ctx->receiver_decl = NULL;
3119 else
3121 TYPE_FIELDS (ctx->record_type)
3122 = nreverse (TYPE_FIELDS (ctx->record_type));
3123 if (flag_checking)
3125 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
3126 for (tree field = TYPE_FIELDS (ctx->record_type);
3127 field;
3128 field = DECL_CHAIN (field))
3129 gcc_assert (DECL_ALIGN (field) == align);
3131 layout_type (ctx->record_type);
3132 if (offloaded)
3133 fixup_child_record_type (ctx);
3136 if (ctx->teams_nested_p && ctx->nonteams_nested_p)
3138 error_at (gimple_location (stmt),
3139 "%<target%> construct with nested %<teams%> construct "
3140 "contains directives outside of the %<teams%> construct");
3141 gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
3145 /* Scan an OpenMP teams directive. */
3147 static void
3148 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
3150 omp_context *ctx = new_omp_context (stmt, outer_ctx);
3152 if (!gimple_omp_teams_host (stmt))
3154 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3155 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3156 return;
3158 taskreg_contexts.safe_push (ctx);
3159 gcc_assert (taskreg_nesting_level == 1);
3160 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3161 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3162 tree name = create_tmp_var_name (".omp_data_s");
3163 name = build_decl (gimple_location (stmt),
3164 TYPE_DECL, name, ctx->record_type);
3165 DECL_ARTIFICIAL (name) = 1;
3166 DECL_NAMELESS (name) = 1;
3167 TYPE_NAME (ctx->record_type) = name;
3168 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3169 create_omp_child_function (ctx, false);
3170 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
3172 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3173 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3175 if (TYPE_FIELDS (ctx->record_type) == NULL)
3176 ctx->record_type = ctx->receiver_decl = NULL;
3179 /* Check nesting restrictions. */
3180 static bool
3181 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
3183 tree c;
3185 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3186 inside an OpenACC CTX. */
3187 if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3188 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
3189 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3191 else if (!(is_gimple_omp (stmt)
3192 && is_gimple_omp_oacc (stmt)))
3194 if (oacc_get_fn_attrib (cfun->decl) != NULL)
3196 error_at (gimple_location (stmt),
3197 "non-OpenACC construct inside of OpenACC routine");
3198 return false;
3200 else
3201 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
3202 if (is_gimple_omp (octx->stmt)
3203 && is_gimple_omp_oacc (octx->stmt))
3205 error_at (gimple_location (stmt),
3206 "non-OpenACC construct inside of OpenACC region");
3207 return false;
3211 if (ctx != NULL)
3213 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
3214 && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
3216 c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
3217 OMP_CLAUSE_DEVICE);
3218 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3220 error_at (gimple_location (stmt),
3221 "OpenMP constructs are not allowed in target region "
3222 "with %<ancestor%>");
3223 return false;
3226 if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
3227 ctx->teams_nested_p = true;
3228 else
3229 ctx->nonteams_nested_p = true;
3231 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
3232 && ctx->outer
3233 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
3234 ctx = ctx->outer;
3235 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3236 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3237 && !ctx->loop_p)
3239 c = NULL_TREE;
3240 if (ctx->order_concurrent
3241 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
3242 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3243 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3245 error_at (gimple_location (stmt),
3246 "OpenMP constructs other than %<parallel%>, %<loop%>"
3247 " or %<simd%> may not be nested inside a region with"
3248 " the %<order(concurrent)%> clause");
3249 return false;
3251 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
3253 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3254 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3256 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
3257 && (ctx->outer == NULL
3258 || !gimple_omp_for_combined_into_p (ctx->stmt)
3259 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
3260 || (gimple_omp_for_kind (ctx->outer->stmt)
3261 != GF_OMP_FOR_KIND_FOR)
3262 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
3264 error_at (gimple_location (stmt),
3265 "%<ordered simd threads%> must be closely "
3266 "nested inside of %<%s simd%> region",
3267 lang_GNU_Fortran () ? "do" : "for");
3268 return false;
3270 return true;
3273 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3274 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
3275 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
3276 return true;
3277 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
3278 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3279 return true;
3280 error_at (gimple_location (stmt),
3281 "OpenMP constructs other than "
3282 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3283 "not be nested inside %<simd%> region");
3284 return false;
3286 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3288 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
3289 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
3290 && omp_find_clause (gimple_omp_for_clauses (stmt),
3291 OMP_CLAUSE_BIND) == NULL_TREE))
3292 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
3294 error_at (gimple_location (stmt),
3295 "only %<distribute%>, %<parallel%> or %<loop%> "
3296 "regions are allowed to be strictly nested inside "
3297 "%<teams%> region");
3298 return false;
3301 else if (ctx->order_concurrent
3302 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3303 && (gimple_code (stmt) != GIMPLE_OMP_FOR
3304 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3305 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3307 if (ctx->loop_p)
3308 error_at (gimple_location (stmt),
3309 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3310 "%<simd%> may not be nested inside a %<loop%> region");
3311 else
3312 error_at (gimple_location (stmt),
3313 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3314 "%<simd%> may not be nested inside a region with "
3315 "the %<order(concurrent)%> clause");
3316 return false;
3319 switch (gimple_code (stmt))
3321 case GIMPLE_OMP_FOR:
3322 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3323 return true;
3324 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3326 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3328 error_at (gimple_location (stmt),
3329 "%<distribute%> region must be strictly nested "
3330 "inside %<teams%> construct");
3331 return false;
3333 return true;
3335 /* We split taskloop into task and nested taskloop in it. */
3336 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3337 return true;
3338 /* For now, hope this will change and loop bind(parallel) will not
3339 be allowed in lots of contexts. */
3340 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3341 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3342 return true;
3343 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3345 bool ok = false;
3347 if (ctx)
3348 switch (gimple_code (ctx->stmt))
3350 case GIMPLE_OMP_FOR:
3351 ok = (gimple_omp_for_kind (ctx->stmt)
3352 == GF_OMP_FOR_KIND_OACC_LOOP);
3353 break;
3355 case GIMPLE_OMP_TARGET:
3356 switch (gimple_omp_target_kind (ctx->stmt))
3358 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3359 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3360 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3361 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3362 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3363 ok = true;
3364 break;
3366 default:
3367 break;
3370 default:
3371 break;
3373 else if (oacc_get_fn_attrib (current_function_decl))
3374 ok = true;
3375 if (!ok)
3377 error_at (gimple_location (stmt),
3378 "OpenACC loop directive must be associated with"
3379 " an OpenACC compute region");
3380 return false;
3383 /* FALLTHRU */
3384 case GIMPLE_CALL:
3385 if (is_gimple_call (stmt)
3386 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3387 == BUILT_IN_GOMP_CANCEL
3388 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3389 == BUILT_IN_GOMP_CANCELLATION_POINT))
3391 const char *bad = NULL;
3392 const char *kind = NULL;
3393 const char *construct
3394 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3395 == BUILT_IN_GOMP_CANCEL)
3396 ? "cancel"
3397 : "cancellation point";
3398 if (ctx == NULL)
3400 error_at (gimple_location (stmt), "orphaned %qs construct",
3401 construct);
3402 return false;
3404 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3405 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3406 : 0)
3408 case 1:
3409 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3410 bad = "parallel";
3411 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3412 == BUILT_IN_GOMP_CANCEL
3413 && !integer_zerop (gimple_call_arg (stmt, 1)))
3414 ctx->cancellable = true;
3415 kind = "parallel";
3416 break;
3417 case 2:
3418 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3419 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3420 bad = "for";
3421 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3422 == BUILT_IN_GOMP_CANCEL
3423 && !integer_zerop (gimple_call_arg (stmt, 1)))
3425 ctx->cancellable = true;
3426 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3427 OMP_CLAUSE_NOWAIT))
3428 warning_at (gimple_location (stmt), 0,
3429 "%<cancel for%> inside "
3430 "%<nowait%> for construct");
3431 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3432 OMP_CLAUSE_ORDERED))
3433 warning_at (gimple_location (stmt), 0,
3434 "%<cancel for%> inside "
3435 "%<ordered%> for construct");
3437 kind = "for";
3438 break;
3439 case 4:
3440 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3441 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3442 bad = "sections";
3443 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3444 == BUILT_IN_GOMP_CANCEL
3445 && !integer_zerop (gimple_call_arg (stmt, 1)))
3447 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3449 ctx->cancellable = true;
3450 if (omp_find_clause (gimple_omp_sections_clauses
3451 (ctx->stmt),
3452 OMP_CLAUSE_NOWAIT))
3453 warning_at (gimple_location (stmt), 0,
3454 "%<cancel sections%> inside "
3455 "%<nowait%> sections construct");
3457 else
3459 gcc_assert (ctx->outer
3460 && gimple_code (ctx->outer->stmt)
3461 == GIMPLE_OMP_SECTIONS);
3462 ctx->outer->cancellable = true;
3463 if (omp_find_clause (gimple_omp_sections_clauses
3464 (ctx->outer->stmt),
3465 OMP_CLAUSE_NOWAIT))
3466 warning_at (gimple_location (stmt), 0,
3467 "%<cancel sections%> inside "
3468 "%<nowait%> sections construct");
3471 kind = "sections";
3472 break;
3473 case 8:
3474 if (!is_task_ctx (ctx)
3475 && (!is_taskloop_ctx (ctx)
3476 || ctx->outer == NULL
3477 || !is_task_ctx (ctx->outer)))
3478 bad = "task";
3479 else
3481 for (omp_context *octx = ctx->outer;
3482 octx; octx = octx->outer)
3484 switch (gimple_code (octx->stmt))
3486 case GIMPLE_OMP_TASKGROUP:
3487 break;
3488 case GIMPLE_OMP_TARGET:
3489 if (gimple_omp_target_kind (octx->stmt)
3490 != GF_OMP_TARGET_KIND_REGION)
3491 continue;
3492 /* FALLTHRU */
3493 case GIMPLE_OMP_PARALLEL:
3494 case GIMPLE_OMP_TEAMS:
3495 error_at (gimple_location (stmt),
3496 "%<%s taskgroup%> construct not closely "
3497 "nested inside of %<taskgroup%> region",
3498 construct);
3499 return false;
3500 case GIMPLE_OMP_TASK:
3501 if (gimple_omp_task_taskloop_p (octx->stmt)
3502 && octx->outer
3503 && is_taskloop_ctx (octx->outer))
3505 tree clauses
3506 = gimple_omp_for_clauses (octx->outer->stmt);
3507 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3508 break;
3510 continue;
3511 default:
3512 continue;
3514 break;
3516 ctx->cancellable = true;
3518 kind = "taskgroup";
3519 break;
3520 default:
3521 error_at (gimple_location (stmt), "invalid arguments");
3522 return false;
3524 if (bad)
3526 error_at (gimple_location (stmt),
3527 "%<%s %s%> construct not closely nested inside of %qs",
3528 construct, kind, bad);
3529 return false;
3532 /* FALLTHRU */
3533 case GIMPLE_OMP_SECTIONS:
3534 case GIMPLE_OMP_SINGLE:
3535 for (; ctx != NULL; ctx = ctx->outer)
3536 switch (gimple_code (ctx->stmt))
3538 case GIMPLE_OMP_FOR:
3539 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3540 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3541 break;
3542 /* FALLTHRU */
3543 case GIMPLE_OMP_SECTIONS:
3544 case GIMPLE_OMP_SINGLE:
3545 case GIMPLE_OMP_ORDERED:
3546 case GIMPLE_OMP_MASTER:
3547 case GIMPLE_OMP_MASKED:
3548 case GIMPLE_OMP_TASK:
3549 case GIMPLE_OMP_CRITICAL:
3550 if (is_gimple_call (stmt))
3552 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3553 != BUILT_IN_GOMP_BARRIER)
3554 return true;
3555 error_at (gimple_location (stmt),
3556 "barrier region may not be closely nested inside "
3557 "of work-sharing, %<loop%>, %<critical%>, "
3558 "%<ordered%>, %<master%>, %<masked%>, explicit "
3559 "%<task%> or %<taskloop%> region");
3560 return false;
3562 error_at (gimple_location (stmt),
3563 "work-sharing region may not be closely nested inside "
3564 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3565 "%<master%>, %<masked%>, explicit %<task%> or "
3566 "%<taskloop%> region");
3567 return false;
3568 case GIMPLE_OMP_PARALLEL:
3569 case GIMPLE_OMP_TEAMS:
3570 return true;
3571 case GIMPLE_OMP_TARGET:
3572 if (gimple_omp_target_kind (ctx->stmt)
3573 == GF_OMP_TARGET_KIND_REGION)
3574 return true;
3575 break;
3576 default:
3577 break;
3579 break;
3580 case GIMPLE_OMP_MASTER:
3581 case GIMPLE_OMP_MASKED:
3582 for (; ctx != NULL; ctx = ctx->outer)
3583 switch (gimple_code (ctx->stmt))
3585 case GIMPLE_OMP_FOR:
3586 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3587 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3588 break;
3589 /* FALLTHRU */
3590 case GIMPLE_OMP_SECTIONS:
3591 case GIMPLE_OMP_SINGLE:
3592 case GIMPLE_OMP_TASK:
3593 error_at (gimple_location (stmt),
3594 "%qs region may not be closely nested inside "
3595 "of work-sharing, %<loop%>, explicit %<task%> or "
3596 "%<taskloop%> region",
3597 gimple_code (stmt) == GIMPLE_OMP_MASTER
3598 ? "master" : "masked");
3599 return false;
3600 case GIMPLE_OMP_PARALLEL:
3601 case GIMPLE_OMP_TEAMS:
3602 return true;
3603 case GIMPLE_OMP_TARGET:
3604 if (gimple_omp_target_kind (ctx->stmt)
3605 == GF_OMP_TARGET_KIND_REGION)
3606 return true;
3607 break;
3608 default:
3609 break;
3611 break;
3612 case GIMPLE_OMP_SCOPE:
3613 for (; ctx != NULL; ctx = ctx->outer)
3614 switch (gimple_code (ctx->stmt))
3616 case GIMPLE_OMP_FOR:
3617 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3618 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3619 break;
3620 /* FALLTHRU */
3621 case GIMPLE_OMP_SECTIONS:
3622 case GIMPLE_OMP_SINGLE:
3623 case GIMPLE_OMP_TASK:
3624 case GIMPLE_OMP_CRITICAL:
3625 case GIMPLE_OMP_ORDERED:
3626 case GIMPLE_OMP_MASTER:
3627 case GIMPLE_OMP_MASKED:
3628 error_at (gimple_location (stmt),
3629 "%<scope%> region may not be closely nested inside "
3630 "of work-sharing, %<loop%>, explicit %<task%>, "
3631 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3632 "or %<masked%> region");
3633 return false;
3634 case GIMPLE_OMP_PARALLEL:
3635 case GIMPLE_OMP_TEAMS:
3636 return true;
3637 case GIMPLE_OMP_TARGET:
3638 if (gimple_omp_target_kind (ctx->stmt)
3639 == GF_OMP_TARGET_KIND_REGION)
3640 return true;
3641 break;
3642 default:
3643 break;
3645 break;
3646 case GIMPLE_OMP_TASK:
3647 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3648 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
3650 enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
3651 error_at (OMP_CLAUSE_LOCATION (c),
3652 "%<%s(%s)%> is only allowed in %<omp ordered%>",
3653 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross",
3654 kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
3655 return false;
3657 break;
3658 case GIMPLE_OMP_ORDERED:
3659 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3660 c; c = OMP_CLAUSE_CHAIN (c))
3662 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS)
3664 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
3666 error_at (OMP_CLAUSE_LOCATION (c),
3667 "invalid depend kind in omp %<ordered%> %<depend%>");
3668 return false;
3670 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3671 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3672 continue;
3675 tree oclause;
3676 /* Look for containing ordered(N) loop. */
3677 if (ctx == NULL
3678 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3679 || (oclause
3680 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3681 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3683 error_at (OMP_CLAUSE_LOCATION (c),
3684 "%<ordered%> construct with %<depend%> clause "
3685 "must be closely nested inside an %<ordered%> loop");
3686 return false;
3689 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3690 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3692 /* ordered simd must be closely nested inside of simd region,
3693 and simd region must not encounter constructs other than
3694 ordered simd, therefore ordered simd may be either orphaned,
3695 or ctx->stmt must be simd. The latter case is handled already
3696 earlier. */
3697 if (ctx != NULL)
3699 error_at (gimple_location (stmt),
3700 "%<ordered%> %<simd%> must be closely nested inside "
3701 "%<simd%> region");
3702 return false;
3705 for (; ctx != NULL; ctx = ctx->outer)
3706 switch (gimple_code (ctx->stmt))
3708 case GIMPLE_OMP_CRITICAL:
3709 case GIMPLE_OMP_TASK:
3710 case GIMPLE_OMP_ORDERED:
3711 ordered_in_taskloop:
3712 error_at (gimple_location (stmt),
3713 "%<ordered%> region may not be closely nested inside "
3714 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3715 "%<taskloop%> region");
3716 return false;
3717 case GIMPLE_OMP_FOR:
3718 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3719 goto ordered_in_taskloop;
3720 tree o;
3721 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3722 OMP_CLAUSE_ORDERED);
3723 if (o == NULL)
3725 error_at (gimple_location (stmt),
3726 "%<ordered%> region must be closely nested inside "
3727 "a loop region with an %<ordered%> clause");
3728 return false;
3730 if (!gimple_omp_ordered_standalone_p (stmt))
3732 if (OMP_CLAUSE_ORDERED_DOACROSS (o))
3734 error_at (gimple_location (stmt),
3735 "%<ordered%> construct without %<doacross%> or "
3736 "%<depend%> clauses must not have the same "
3737 "binding region as %<ordered%> construct with "
3738 "those clauses");
3739 return false;
3741 else if (OMP_CLAUSE_ORDERED_EXPR (o))
3743 tree co
3744 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3745 OMP_CLAUSE_COLLAPSE);
3746 HOST_WIDE_INT
3747 o_n = tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (o));
3748 HOST_WIDE_INT c_n = 1;
3749 if (co)
3750 c_n = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (co));
3751 if (o_n != c_n)
3753 error_at (gimple_location (stmt),
3754 "%<ordered%> construct without %<doacross%> "
3755 "or %<depend%> clauses binds to loop where "
3756 "%<collapse%> argument %wd is different from "
3757 "%<ordered%> argument %wd", c_n, o_n);
3758 return false;
3762 return true;
3763 case GIMPLE_OMP_TARGET:
3764 if (gimple_omp_target_kind (ctx->stmt)
3765 != GF_OMP_TARGET_KIND_REGION)
3766 break;
3767 /* FALLTHRU */
3768 case GIMPLE_OMP_PARALLEL:
3769 case GIMPLE_OMP_TEAMS:
3770 error_at (gimple_location (stmt),
3771 "%<ordered%> region must be closely nested inside "
3772 "a loop region with an %<ordered%> clause");
3773 return false;
3774 default:
3775 break;
3777 break;
3778 case GIMPLE_OMP_CRITICAL:
3780 tree this_stmt_name
3781 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3782 for (; ctx != NULL; ctx = ctx->outer)
3783 if (gomp_critical *other_crit
3784 = dyn_cast <gomp_critical *> (ctx->stmt))
3785 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3787 error_at (gimple_location (stmt),
3788 "%<critical%> region may not be nested inside "
3789 "a %<critical%> region with the same name");
3790 return false;
3793 break;
3794 case GIMPLE_OMP_TEAMS:
3795 if (ctx == NULL)
3796 break;
3797 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3798 || (gimple_omp_target_kind (ctx->stmt)
3799 != GF_OMP_TARGET_KIND_REGION))
3801 /* Teams construct can appear either strictly nested inside of
3802 target construct with no intervening stmts, or can be encountered
3803 only by initial task (so must not appear inside any OpenMP
3804 construct. */
3805 error_at (gimple_location (stmt),
3806 "%<teams%> construct must be closely nested inside of "
3807 "%<target%> construct or not nested in any OpenMP "
3808 "construct");
3809 return false;
3811 break;
3812 case GIMPLE_OMP_TARGET:
3813 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3814 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
3816 enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
3817 error_at (OMP_CLAUSE_LOCATION (c),
3818 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3819 kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
3820 return false;
3822 if (is_gimple_omp_offloaded (stmt)
3823 && oacc_get_fn_attrib (cfun->decl) != NULL)
3825 error_at (gimple_location (stmt),
3826 "OpenACC region inside of OpenACC routine, nested "
3827 "parallelism not supported yet");
3828 return false;
3830 for (; ctx != NULL; ctx = ctx->outer)
3832 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3834 if (is_gimple_omp (stmt)
3835 && is_gimple_omp_oacc (stmt)
3836 && is_gimple_omp (ctx->stmt))
3838 error_at (gimple_location (stmt),
3839 "OpenACC construct inside of non-OpenACC region");
3840 return false;
3842 continue;
3845 const char *stmt_name, *ctx_stmt_name;
3846 switch (gimple_omp_target_kind (stmt))
3848 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3849 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3850 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3851 case GF_OMP_TARGET_KIND_ENTER_DATA:
3852 stmt_name = "target enter data"; break;
3853 case GF_OMP_TARGET_KIND_EXIT_DATA:
3854 stmt_name = "target exit data"; break;
3855 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3856 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3857 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3858 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3859 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3860 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
3861 stmt_name = "enter data"; break;
3862 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
3863 stmt_name = "exit data"; break;
3864 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3865 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3866 break;
3867 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3868 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3869 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3870 /* OpenACC 'kernels' decomposed parts. */
3871 stmt_name = "kernels"; break;
3872 default: gcc_unreachable ();
3874 switch (gimple_omp_target_kind (ctx->stmt))
3876 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3877 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3878 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3879 ctx_stmt_name = "parallel"; break;
3880 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3881 ctx_stmt_name = "kernels"; break;
3882 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3883 ctx_stmt_name = "serial"; break;
3884 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3885 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3886 ctx_stmt_name = "host_data"; break;
3887 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3888 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3889 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3890 /* OpenACC 'kernels' decomposed parts. */
3891 ctx_stmt_name = "kernels"; break;
3892 default: gcc_unreachable ();
3895 /* OpenACC/OpenMP mismatch? */
3896 if (is_gimple_omp_oacc (stmt)
3897 != is_gimple_omp_oacc (ctx->stmt))
3899 error_at (gimple_location (stmt),
3900 "%s %qs construct inside of %s %qs region",
3901 (is_gimple_omp_oacc (stmt)
3902 ? "OpenACC" : "OpenMP"), stmt_name,
3903 (is_gimple_omp_oacc (ctx->stmt)
3904 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3905 return false;
3907 if (is_gimple_omp_offloaded (ctx->stmt))
3909 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3910 if (is_gimple_omp_oacc (ctx->stmt))
3912 error_at (gimple_location (stmt),
3913 "%qs construct inside of %qs region",
3914 stmt_name, ctx_stmt_name);
3915 return false;
3917 else
3919 if ((gimple_omp_target_kind (ctx->stmt)
3920 == GF_OMP_TARGET_KIND_REGION)
3921 && (gimple_omp_target_kind (stmt)
3922 == GF_OMP_TARGET_KIND_REGION))
3924 c = omp_find_clause (gimple_omp_target_clauses (stmt),
3925 OMP_CLAUSE_DEVICE);
3926 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3927 break;
3929 warning_at (gimple_location (stmt), 0,
3930 "%qs construct inside of %qs region",
3931 stmt_name, ctx_stmt_name);
3935 break;
3936 default:
3937 break;
3939 return true;
3943 /* Helper function scan_omp.
3945 Callback for walk_tree or operators in walk_gimple_stmt used to
3946 scan for OMP directives in TP. */
3948 static tree
3949 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3951 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3952 omp_context *ctx = (omp_context *) wi->info;
3953 tree t = *tp;
3955 switch (TREE_CODE (t))
3957 case VAR_DECL:
3958 case PARM_DECL:
3959 case LABEL_DECL:
3960 case RESULT_DECL:
3961 if (ctx)
3963 tree repl = remap_decl (t, &ctx->cb);
3964 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3965 *tp = repl;
3967 break;
3969 default:
3970 if (ctx && TYPE_P (t))
3971 *tp = remap_type (t, &ctx->cb);
3972 else if (!DECL_P (t))
3974 *walk_subtrees = 1;
3975 if (ctx)
3977 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3978 if (tem != TREE_TYPE (t))
3980 if (TREE_CODE (t) == INTEGER_CST)
3981 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3982 else
3983 TREE_TYPE (t) = tem;
3987 break;
3990 return NULL_TREE;
3993 /* Return true if FNDECL is a setjmp or a longjmp. */
3995 static bool
3996 setjmp_or_longjmp_p (const_tree fndecl)
3998 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP, BUILT_IN_LONGJMP))
3999 return true;
4001 tree declname = DECL_NAME (fndecl);
4002 if (!declname
4003 || (DECL_CONTEXT (fndecl) != NULL_TREE
4004 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
4005 || !TREE_PUBLIC (fndecl))
4006 return false;
4008 const char *name = IDENTIFIER_POINTER (declname);
4009 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
4012 /* Helper function for scan_omp.
4014 Callback for walk_gimple_stmt used to scan for OMP directives in
4015 the current statement in GSI. */
4017 static tree
4018 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
4019 struct walk_stmt_info *wi)
4021 gimple *stmt = gsi_stmt (*gsi);
4022 omp_context *ctx = (omp_context *) wi->info;
4024 if (gimple_has_location (stmt))
4025 input_location = gimple_location (stmt);
4027 /* Check the nesting restrictions. */
4028 bool remove = false;
4029 if (is_gimple_omp (stmt))
4030 remove = !check_omp_nesting_restrictions (stmt, ctx);
4031 else if (is_gimple_call (stmt))
4033 tree fndecl = gimple_call_fndecl (stmt);
4034 if (fndecl)
4036 if (ctx
4037 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4038 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
4039 && setjmp_or_longjmp_p (fndecl)
4040 && !ctx->loop_p)
4042 remove = true;
4043 error_at (gimple_location (stmt),
4044 "setjmp/longjmp inside %<simd%> construct");
4046 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4047 switch (DECL_FUNCTION_CODE (fndecl))
4049 case BUILT_IN_GOMP_BARRIER:
4050 case BUILT_IN_GOMP_CANCEL:
4051 case BUILT_IN_GOMP_CANCELLATION_POINT:
4052 case BUILT_IN_GOMP_TASKYIELD:
4053 case BUILT_IN_GOMP_TASKWAIT:
4054 case BUILT_IN_GOMP_TASKGROUP_START:
4055 case BUILT_IN_GOMP_TASKGROUP_END:
4056 remove = !check_omp_nesting_restrictions (stmt, ctx);
4057 break;
4058 default:
4059 break;
4061 else if (ctx)
4063 omp_context *octx = ctx;
4064 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
4065 octx = ctx->outer;
4066 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
4068 remove = true;
4069 error_at (gimple_location (stmt),
4070 "OpenMP runtime API call %qD in a region with "
4071 "%<order(concurrent)%> clause", fndecl);
4073 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4074 && omp_runtime_api_call (fndecl)
4075 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4076 != strlen ("omp_get_num_teams"))
4077 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4078 "omp_get_num_teams") != 0)
4079 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4080 != strlen ("omp_get_team_num"))
4081 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4082 "omp_get_team_num") != 0))
4084 remove = true;
4085 error_at (gimple_location (stmt),
4086 "OpenMP runtime API call %qD strictly nested in a "
4087 "%<teams%> region", fndecl);
4089 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
4090 && (gimple_omp_target_kind (ctx->stmt)
4091 == GF_OMP_TARGET_KIND_REGION)
4092 && omp_runtime_api_call (fndecl))
4094 tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
4095 tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
4096 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
4097 error_at (gimple_location (stmt),
4098 "OpenMP runtime API call %qD in a region with "
4099 "%<device(ancestor)%> clause", fndecl);
4104 if (remove)
4106 stmt = gimple_build_nop ();
4107 gsi_replace (gsi, stmt, false);
4110 *handled_ops_p = true;
4112 switch (gimple_code (stmt))
4114 case GIMPLE_OMP_PARALLEL:
4115 taskreg_nesting_level++;
4116 scan_omp_parallel (gsi, ctx);
4117 taskreg_nesting_level--;
4118 break;
4120 case GIMPLE_OMP_TASK:
4121 taskreg_nesting_level++;
4122 scan_omp_task (gsi, ctx);
4123 taskreg_nesting_level--;
4124 break;
4126 case GIMPLE_OMP_FOR:
4127 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4128 == GF_OMP_FOR_KIND_SIMD)
4129 && gimple_omp_for_combined_into_p (stmt)
4130 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
4132 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
4133 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
4134 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
4136 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
4137 break;
4140 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4141 == GF_OMP_FOR_KIND_SIMD)
4142 && omp_maybe_offloaded_ctx (ctx)
4143 && omp_max_simt_vf ()
4144 && gimple_omp_for_collapse (stmt) == 1)
4145 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
4146 else
4147 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
4148 break;
4150 case GIMPLE_OMP_SCOPE:
4151 ctx = new_omp_context (stmt, ctx);
4152 scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
4153 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4154 break;
4156 case GIMPLE_OMP_SECTIONS:
4157 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
4158 break;
4160 case GIMPLE_OMP_SINGLE:
4161 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
4162 break;
4164 case GIMPLE_OMP_SCAN:
4165 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
4167 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
4168 ctx->scan_inclusive = true;
4169 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
4170 ctx->scan_exclusive = true;
4172 /* FALLTHRU */
4173 case GIMPLE_OMP_SECTION:
4174 case GIMPLE_OMP_STRUCTURED_BLOCK:
4175 case GIMPLE_OMP_MASTER:
4176 case GIMPLE_OMP_ORDERED:
4177 case GIMPLE_OMP_CRITICAL:
4178 ctx = new_omp_context (stmt, ctx);
4179 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4180 break;
4182 case GIMPLE_OMP_MASKED:
4183 ctx = new_omp_context (stmt, ctx);
4184 scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
4185 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4186 break;
4188 case GIMPLE_OMP_TASKGROUP:
4189 ctx = new_omp_context (stmt, ctx);
4190 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
4191 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4192 break;
4194 case GIMPLE_OMP_TARGET:
4195 if (is_gimple_omp_offloaded (stmt))
4197 taskreg_nesting_level++;
4198 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4199 taskreg_nesting_level--;
4201 else
4202 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4203 break;
4205 case GIMPLE_OMP_TEAMS:
4206 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4208 taskreg_nesting_level++;
4209 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4210 taskreg_nesting_level--;
4212 else
4213 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4214 break;
4216 case GIMPLE_BIND:
4218 tree var;
4220 *handled_ops_p = false;
4221 if (ctx)
4222 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4223 var ;
4224 var = DECL_CHAIN (var))
4225 insert_decl_map (&ctx->cb, var, var);
4227 break;
4228 default:
4229 *handled_ops_p = false;
4230 break;
4233 return NULL_TREE;
4237 /* Scan all the statements starting at the current statement. CTX
4238 contains context information about the OMP directives and
4239 clauses found during the scan. */
4241 static void
4242 scan_omp (gimple_seq *body_p, omp_context *ctx)
4244 location_t saved_location;
4245 struct walk_stmt_info wi;
4247 memset (&wi, 0, sizeof (wi));
4248 wi.info = ctx;
4249 wi.want_locations = true;
4251 saved_location = input_location;
4252 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4253 input_location = saved_location;
4256 /* Re-gimplification and code generation routines. */
4258 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4259 of BIND if in a method. */
4261 static void
4262 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4264 if (DECL_ARGUMENTS (current_function_decl)
4265 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4266 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4267 == POINTER_TYPE))
4269 tree vars = gimple_bind_vars (bind);
4270 for (tree *pvar = &vars; *pvar; )
4271 if (omp_member_access_dummy_var (*pvar))
4272 *pvar = DECL_CHAIN (*pvar);
4273 else
4274 pvar = &DECL_CHAIN (*pvar);
4275 gimple_bind_set_vars (bind, vars);
4279 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4280 block and its subblocks. */
4282 static void
4283 remove_member_access_dummy_vars (tree block)
4285 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4286 if (omp_member_access_dummy_var (*pvar))
4287 *pvar = DECL_CHAIN (*pvar);
4288 else
4289 pvar = &DECL_CHAIN (*pvar);
4291 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4292 remove_member_access_dummy_vars (block);
4295 /* If a context was created for STMT when it was scanned, return it. */
4297 static omp_context *
4298 maybe_lookup_ctx (gimple *stmt)
4300 splay_tree_node n;
4301 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4302 return n ? (omp_context *) n->value : NULL;
4306 /* Find the mapping for DECL in CTX or the immediately enclosing
4307 context that has a mapping for DECL.
4309 If CTX is a nested parallel directive, we may have to use the decl
4310 mappings created in CTX's parent context. Suppose that we have the
4311 following parallel nesting (variable UIDs showed for clarity):
4313 iD.1562 = 0;
4314 #omp parallel shared(iD.1562) -> outer parallel
4315 iD.1562 = iD.1562 + 1;
4317 #omp parallel shared (iD.1562) -> inner parallel
4318 iD.1562 = iD.1562 - 1;
4320 Each parallel structure will create a distinct .omp_data_s structure
4321 for copying iD.1562 in/out of the directive:
4323 outer parallel .omp_data_s.1.i -> iD.1562
4324 inner parallel .omp_data_s.2.i -> iD.1562
4326 A shared variable mapping will produce a copy-out operation before
4327 the parallel directive and a copy-in operation after it. So, in
4328 this case we would have:
4330 iD.1562 = 0;
4331 .omp_data_o.1.i = iD.1562;
4332 #omp parallel shared(iD.1562) -> outer parallel
4333 .omp_data_i.1 = &.omp_data_o.1
4334 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4336 .omp_data_o.2.i = iD.1562; -> **
4337 #omp parallel shared(iD.1562) -> inner parallel
4338 .omp_data_i.2 = &.omp_data_o.2
4339 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4342 ** This is a problem. The symbol iD.1562 cannot be referenced
4343 inside the body of the outer parallel region. But since we are
4344 emitting this copy operation while expanding the inner parallel
4345 directive, we need to access the CTX structure of the outer
4346 parallel directive to get the correct mapping:
4348 .omp_data_o.2.i = .omp_data_i.1->i
4350 Since there may be other workshare or parallel directives enclosing
4351 the parallel directive, it may be necessary to walk up the context
4352 parent chain. This is not a problem in general because nested
4353 parallelism happens only rarely. */
4355 static tree
4356 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4358 tree t;
4359 omp_context *up;
4361 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4362 t = maybe_lookup_decl (decl, up);
4364 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4366 return t ? t : decl;
4370 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4371 in outer contexts. */
4373 static tree
4374 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4376 tree t = NULL;
4377 omp_context *up;
4379 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4380 t = maybe_lookup_decl (decl, up);
4382 return t ? t : decl;
4386 /* Construct the initialization value for reduction operation OP. */
4388 tree
4389 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4391 switch (op)
4393 case PLUS_EXPR:
4394 case MINUS_EXPR:
4395 case BIT_IOR_EXPR:
4396 case BIT_XOR_EXPR:
4397 case TRUTH_OR_EXPR:
4398 case TRUTH_ORIF_EXPR:
4399 case TRUTH_XOR_EXPR:
4400 case NE_EXPR:
4401 return build_zero_cst (type);
4403 case MULT_EXPR:
4404 case TRUTH_AND_EXPR:
4405 case TRUTH_ANDIF_EXPR:
4406 case EQ_EXPR:
4407 return fold_convert_loc (loc, type, integer_one_node);
4409 case BIT_AND_EXPR:
4410 return fold_convert_loc (loc, type, integer_minus_one_node);
4412 case MAX_EXPR:
4413 if (SCALAR_FLOAT_TYPE_P (type))
4415 REAL_VALUE_TYPE min;
4416 if (HONOR_INFINITIES (type))
4417 real_arithmetic (&min, NEGATE_EXPR, &dconstinf, NULL);
4418 else
4419 real_maxval (&min, 1, TYPE_MODE (type));
4420 return build_real (type, min);
4422 else if (POINTER_TYPE_P (type))
4424 wide_int min
4425 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4426 return wide_int_to_tree (type, min);
4428 else
4430 gcc_assert (INTEGRAL_TYPE_P (type));
4431 return TYPE_MIN_VALUE (type);
4434 case MIN_EXPR:
4435 if (SCALAR_FLOAT_TYPE_P (type))
4437 REAL_VALUE_TYPE max;
4438 if (HONOR_INFINITIES (type))
4439 max = dconstinf;
4440 else
4441 real_maxval (&max, 0, TYPE_MODE (type));
4442 return build_real (type, max);
4444 else if (POINTER_TYPE_P (type))
4446 wide_int max
4447 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4448 return wide_int_to_tree (type, max);
4450 else
4452 gcc_assert (INTEGRAL_TYPE_P (type));
4453 return TYPE_MAX_VALUE (type);
4456 default:
4457 gcc_unreachable ();
4461 /* Construct the initialization value for reduction CLAUSE. */
4463 tree
4464 omp_reduction_init (tree clause, tree type)
4466 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4467 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4470 /* Return alignment to be assumed for var in CLAUSE, which should be
4471 OMP_CLAUSE_ALIGNED. */
4473 static tree
4474 omp_clause_aligned_alignment (tree clause)
4476 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4477 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4479 /* Otherwise return implementation defined alignment. */
4480 unsigned int al = 1;
4481 opt_scalar_mode mode_iter;
4482 auto_vector_modes modes;
4483 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4484 static enum mode_class classes[]
4485 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4486 for (int i = 0; i < 4; i += 2)
4487 /* The for loop above dictates that we only walk through scalar classes. */
4488 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4490 scalar_mode mode = mode_iter.require ();
4491 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4492 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4493 continue;
4494 machine_mode alt_vmode;
4495 for (unsigned int j = 0; j < modes.length (); ++j)
4496 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4497 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4498 vmode = alt_vmode;
4500 tree type = lang_hooks.types.type_for_mode (mode, 1);
4501 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4502 continue;
4503 type = build_vector_type_for_mode (type, vmode);
4504 if (TYPE_MODE (type) != vmode)
4505 continue;
4506 if (TYPE_ALIGN_UNIT (type) > al)
4507 al = TYPE_ALIGN_UNIT (type);
4509 return build_int_cst (integer_type_node, al);
4513 /* This structure is part of the interface between lower_rec_simd_input_clauses
4514 and lower_rec_input_clauses. */
4516 class omplow_simd_context {
4517 public:
4518 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4519 tree idx;
4520 tree lane;
4521 tree lastlane;
4522 vec<tree, va_heap> simt_eargs;
4523 gimple_seq simt_dlist;
4524 poly_uint64_pod max_vf;
4525 bool is_simt;
4528 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4529 privatization. */
4531 static bool
4532 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4533 omplow_simd_context *sctx, tree &ivar,
4534 tree &lvar, tree *rvar = NULL,
4535 tree *rvar2 = NULL)
4537 if (known_eq (sctx->max_vf, 0U))
4539 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4540 if (maybe_gt (sctx->max_vf, 1U))
4542 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4543 OMP_CLAUSE_SAFELEN);
4544 if (c)
4546 poly_uint64 safe_len;
4547 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4548 || maybe_lt (safe_len, 1U))
4549 sctx->max_vf = 1;
4550 else
4551 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4554 if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
4556 for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
4557 c = OMP_CLAUSE_CHAIN (c))
4559 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4560 continue;
4562 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4564 /* UDR reductions are not supported yet for SIMT, disable
4565 SIMT. */
4566 sctx->max_vf = 1;
4567 break;
4570 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
4571 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
4573 /* Doing boolean operations on non-integral types is
4574 for conformance only, it's not worth supporting this
4575 for SIMT. */
4576 sctx->max_vf = 1;
4577 break;
4581 if (maybe_gt (sctx->max_vf, 1U))
4583 sctx->idx = create_tmp_var (unsigned_type_node);
4584 sctx->lane = create_tmp_var (unsigned_type_node);
4587 if (known_eq (sctx->max_vf, 1U))
4588 return false;
4590 if (sctx->is_simt)
4592 if (is_gimple_reg (new_var))
4594 ivar = lvar = new_var;
4595 return true;
4597 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4598 ivar = lvar = create_tmp_var (type);
4599 TREE_ADDRESSABLE (ivar) = 1;
4600 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4601 NULL, DECL_ATTRIBUTES (ivar));
4602 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4603 tree clobber = build_clobber (type);
4604 gimple *g = gimple_build_assign (ivar, clobber);
4605 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4607 else
4609 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4610 tree avar = create_tmp_var_raw (atype);
4611 if (TREE_ADDRESSABLE (new_var))
4612 TREE_ADDRESSABLE (avar) = 1;
4613 DECL_ATTRIBUTES (avar)
4614 = tree_cons (get_identifier ("omp simd array"), NULL,
4615 DECL_ATTRIBUTES (avar));
4616 gimple_add_tmp_var (avar);
4617 tree iavar = avar;
4618 if (rvar && !ctx->for_simd_scan_phase)
4620 /* For inscan reductions, create another array temporary,
4621 which will hold the reduced value. */
4622 iavar = create_tmp_var_raw (atype);
4623 if (TREE_ADDRESSABLE (new_var))
4624 TREE_ADDRESSABLE (iavar) = 1;
4625 DECL_ATTRIBUTES (iavar)
4626 = tree_cons (get_identifier ("omp simd array"), NULL,
4627 tree_cons (get_identifier ("omp simd inscan"), NULL,
4628 DECL_ATTRIBUTES (iavar)));
4629 gimple_add_tmp_var (iavar);
4630 ctx->cb.decl_map->put (avar, iavar);
4631 if (sctx->lastlane == NULL_TREE)
4632 sctx->lastlane = create_tmp_var (unsigned_type_node);
4633 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4634 sctx->lastlane, NULL_TREE, NULL_TREE);
4635 TREE_THIS_NOTRAP (*rvar) = 1;
4637 if (ctx->scan_exclusive)
4639 /* And for exclusive scan yet another one, which will
4640 hold the value during the scan phase. */
4641 tree savar = create_tmp_var_raw (atype);
4642 if (TREE_ADDRESSABLE (new_var))
4643 TREE_ADDRESSABLE (savar) = 1;
4644 DECL_ATTRIBUTES (savar)
4645 = tree_cons (get_identifier ("omp simd array"), NULL,
4646 tree_cons (get_identifier ("omp simd inscan "
4647 "exclusive"), NULL,
4648 DECL_ATTRIBUTES (savar)));
4649 gimple_add_tmp_var (savar);
4650 ctx->cb.decl_map->put (iavar, savar);
4651 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4652 sctx->idx, NULL_TREE, NULL_TREE);
4653 TREE_THIS_NOTRAP (*rvar2) = 1;
4656 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4657 NULL_TREE, NULL_TREE);
4658 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4659 NULL_TREE, NULL_TREE);
4660 TREE_THIS_NOTRAP (ivar) = 1;
4661 TREE_THIS_NOTRAP (lvar) = 1;
4663 if (DECL_P (new_var))
4665 SET_DECL_VALUE_EXPR (new_var, lvar);
4666 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4668 return true;
4671 /* Helper function of lower_rec_input_clauses. For a reference
4672 in simd reduction, add an underlying variable it will reference. */
4674 static void
4675 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4677 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4678 if (TREE_CONSTANT (z))
4680 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4681 get_name (new_vard));
4682 gimple_add_tmp_var (z);
4683 TREE_ADDRESSABLE (z) = 1;
4684 z = build_fold_addr_expr_loc (loc, z);
4685 gimplify_assign (new_vard, z, ilist);
4689 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4690 code to emit (type) (tskred_temp[idx]). */
4692 static tree
4693 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4694 unsigned idx)
4696 unsigned HOST_WIDE_INT sz
4697 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4698 tree r = build2 (MEM_REF, pointer_sized_int_node,
4699 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4700 idx * sz));
4701 tree v = create_tmp_var (pointer_sized_int_node);
4702 gimple *g = gimple_build_assign (v, r);
4703 gimple_seq_add_stmt (ilist, g);
4704 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4706 v = create_tmp_var (type);
4707 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4708 gimple_seq_add_stmt (ilist, g);
4710 return v;
4713 /* Lower early initialization of privatized variable NEW_VAR
4714 if it needs an allocator (has allocate clause). */
4716 static bool
4717 lower_private_allocate (tree var, tree new_var, tree &allocator,
4718 tree &allocate_ptr, gimple_seq *ilist,
4719 omp_context *ctx, bool is_ref, tree size)
4721 if (allocator)
4722 return false;
4723 gcc_assert (allocate_ptr == NULL_TREE);
4724 if (ctx->allocate_map
4725 && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
4726 if (tree *allocatorp = ctx->allocate_map->get (var))
4727 allocator = *allocatorp;
4728 if (allocator == NULL_TREE)
4729 return false;
4730 if (!is_ref && omp_privatize_by_reference (var))
4732 allocator = NULL_TREE;
4733 return false;
4736 unsigned HOST_WIDE_INT ialign = 0;
4737 if (TREE_CODE (allocator) == TREE_LIST)
4739 ialign = tree_to_uhwi (TREE_VALUE (allocator));
4740 allocator = TREE_PURPOSE (allocator);
4742 if (TREE_CODE (allocator) != INTEGER_CST)
4743 allocator = build_outer_var_ref (allocator, ctx, OMP_CLAUSE_ALLOCATE);
4744 allocator = fold_convert (pointer_sized_int_node, allocator);
4745 if (TREE_CODE (allocator) != INTEGER_CST)
4747 tree var = create_tmp_var (TREE_TYPE (allocator));
4748 gimplify_assign (var, allocator, ilist);
4749 allocator = var;
4752 tree ptr_type, align, sz = size;
4753 if (TYPE_P (new_var))
4755 ptr_type = build_pointer_type (new_var);
4756 ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
4758 else if (is_ref)
4760 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4761 ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4763 else
4765 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4766 ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
4767 if (sz == NULL_TREE)
4768 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
4770 align = build_int_cst (size_type_node, ialign);
4771 if (TREE_CODE (sz) != INTEGER_CST)
4773 tree szvar = create_tmp_var (size_type_node);
4774 gimplify_assign (szvar, sz, ilist);
4775 sz = szvar;
4777 allocate_ptr = create_tmp_var (ptr_type);
4778 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4779 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4780 gimple_call_set_lhs (g, allocate_ptr);
4781 gimple_seq_add_stmt (ilist, g);
4782 if (!is_ref)
4784 tree x = build_simple_mem_ref (allocate_ptr);
4785 TREE_THIS_NOTRAP (x) = 1;
4786 SET_DECL_VALUE_EXPR (new_var, x);
4787 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4789 return true;
4792 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4793 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4794 private variables. Initialization statements go in ILIST, while calls
4795 to destructors go in DLIST. */
4797 static void
4798 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4799 omp_context *ctx, struct omp_for_data *fd)
4801 tree c, copyin_seq, x, ptr;
4802 bool copyin_by_ref = false;
4803 bool lastprivate_firstprivate = false;
4804 bool reduction_omp_orig_ref = false;
4805 int pass;
4806 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4807 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4808 omplow_simd_context sctx = omplow_simd_context ();
4809 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4810 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4811 gimple_seq llist[4] = { };
4812 tree nonconst_simd_if = NULL_TREE;
4814 copyin_seq = NULL;
4815 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4817 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4818 with data sharing clauses referencing variable sized vars. That
4819 is unnecessarily hard to support and very unlikely to result in
4820 vectorized code anyway. */
4821 if (is_simd)
4822 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4823 switch (OMP_CLAUSE_CODE (c))
4825 case OMP_CLAUSE_LINEAR:
4826 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4827 sctx.max_vf = 1;
4828 /* FALLTHRU */
4829 case OMP_CLAUSE_PRIVATE:
4830 case OMP_CLAUSE_FIRSTPRIVATE:
4831 case OMP_CLAUSE_LASTPRIVATE:
4832 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4833 sctx.max_vf = 1;
4834 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4836 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4837 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4838 sctx.max_vf = 1;
4840 break;
4841 case OMP_CLAUSE_REDUCTION:
4842 case OMP_CLAUSE_IN_REDUCTION:
4843 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4844 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4845 sctx.max_vf = 1;
4846 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4848 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4849 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4850 sctx.max_vf = 1;
4852 break;
4853 case OMP_CLAUSE_IF:
4854 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4855 sctx.max_vf = 1;
4856 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4857 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4858 break;
4859 case OMP_CLAUSE_SIMDLEN:
4860 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4861 sctx.max_vf = 1;
4862 break;
4863 case OMP_CLAUSE__CONDTEMP_:
4864 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4865 if (sctx.is_simt)
4866 sctx.max_vf = 1;
4867 break;
4868 default:
4869 continue;
4872 /* Add a placeholder for simduid. */
4873 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4874 sctx.simt_eargs.safe_push (NULL_TREE);
4876 unsigned task_reduction_cnt = 0;
4877 unsigned task_reduction_cntorig = 0;
4878 unsigned task_reduction_cnt_full = 0;
4879 unsigned task_reduction_cntorig_full = 0;
4880 unsigned task_reduction_other_cnt = 0;
4881 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4882 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4883 /* Do all the fixed sized types in the first pass, and the variable sized
4884 types in the second pass. This makes sure that the scalar arguments to
4885 the variable sized types are processed before we use them in the
4886 variable sized operations. For task reductions we use 4 passes, in the
4887 first two we ignore them, in the third one gather arguments for
4888 GOMP_task_reduction_remap call and in the last pass actually handle
4889 the task reductions. */
4890 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4891 ? 4 : 2); ++pass)
4893 if (pass == 2 && task_reduction_cnt)
4895 tskred_atype
4896 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4897 + task_reduction_cntorig);
4898 tskred_avar = create_tmp_var_raw (tskred_atype);
4899 gimple_add_tmp_var (tskred_avar);
4900 TREE_ADDRESSABLE (tskred_avar) = 1;
4901 task_reduction_cnt_full = task_reduction_cnt;
4902 task_reduction_cntorig_full = task_reduction_cntorig;
4904 else if (pass == 3 && task_reduction_cnt)
4906 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4907 gimple *g
4908 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4909 size_int (task_reduction_cntorig),
4910 build_fold_addr_expr (tskred_avar));
4911 gimple_seq_add_stmt (ilist, g);
4913 if (pass == 3 && task_reduction_other_cnt)
4915 /* For reduction clauses, build
4916 tskred_base = (void *) tskred_temp[2]
4917 + omp_get_thread_num () * tskred_temp[1]
4918 or if tskred_temp[1] is known to be constant, that constant
4919 directly. This is the start of the private reduction copy block
4920 for the current thread. */
4921 tree v = create_tmp_var (integer_type_node);
4922 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4923 gimple *g = gimple_build_call (x, 0);
4924 gimple_call_set_lhs (g, v);
4925 gimple_seq_add_stmt (ilist, g);
4926 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4927 tskred_temp = OMP_CLAUSE_DECL (c);
4928 if (is_taskreg_ctx (ctx))
4929 tskred_temp = lookup_decl (tskred_temp, ctx);
4930 tree v2 = create_tmp_var (sizetype);
4931 g = gimple_build_assign (v2, NOP_EXPR, v);
4932 gimple_seq_add_stmt (ilist, g);
4933 if (ctx->task_reductions[0])
4934 v = fold_convert (sizetype, ctx->task_reductions[0]);
4935 else
4936 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4937 tree v3 = create_tmp_var (sizetype);
4938 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4939 gimple_seq_add_stmt (ilist, g);
4940 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4941 tskred_base = create_tmp_var (ptr_type_node);
4942 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4943 gimple_seq_add_stmt (ilist, g);
4945 task_reduction_cnt = 0;
4946 task_reduction_cntorig = 0;
4947 task_reduction_other_cnt = 0;
4948 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4950 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4951 tree var, new_var;
4952 bool by_ref;
4953 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4954 bool task_reduction_p = false;
4955 bool task_reduction_needs_orig_p = false;
4956 tree cond = NULL_TREE;
4957 tree allocator, allocate_ptr;
4959 switch (c_kind)
4961 case OMP_CLAUSE_PRIVATE:
4962 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4963 continue;
4964 break;
4965 case OMP_CLAUSE_SHARED:
4966 /* Ignore shared directives in teams construct inside
4967 of target construct. */
4968 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4969 && !is_host_teams_ctx (ctx))
4970 continue;
4971 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4973 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4974 || is_global_var (OMP_CLAUSE_DECL (c)));
4975 continue;
4977 case OMP_CLAUSE_FIRSTPRIVATE:
4978 case OMP_CLAUSE_COPYIN:
4979 break;
4980 case OMP_CLAUSE_LINEAR:
4981 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4982 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4983 lastprivate_firstprivate = true;
4984 break;
4985 case OMP_CLAUSE_REDUCTION:
4986 case OMP_CLAUSE_IN_REDUCTION:
4987 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
4988 || is_task_ctx (ctx)
4989 || OMP_CLAUSE_REDUCTION_TASK (c))
4991 task_reduction_p = true;
4992 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4994 task_reduction_other_cnt++;
4995 if (pass == 2)
4996 continue;
4998 else
4999 task_reduction_cnt++;
5000 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5002 var = OMP_CLAUSE_DECL (c);
5003 /* If var is a global variable that isn't privatized
5004 in outer contexts, we don't need to look up the
5005 original address, it is always the address of the
5006 global variable itself. */
5007 if (!DECL_P (var)
5008 || omp_privatize_by_reference (var)
5009 || !is_global_var
5010 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5012 task_reduction_needs_orig_p = true;
5013 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5014 task_reduction_cntorig++;
5018 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5019 reduction_omp_orig_ref = true;
5020 break;
5021 case OMP_CLAUSE__REDUCTEMP_:
5022 if (!is_taskreg_ctx (ctx))
5023 continue;
5024 /* FALLTHRU */
5025 case OMP_CLAUSE__LOOPTEMP_:
5026 /* Handle _looptemp_/_reductemp_ clauses only on
5027 parallel/task. */
5028 if (fd)
5029 continue;
5030 break;
5031 case OMP_CLAUSE_LASTPRIVATE:
5032 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5034 lastprivate_firstprivate = true;
5035 if (pass != 0 || is_taskloop_ctx (ctx))
5036 continue;
5038 /* Even without corresponding firstprivate, if
5039 decl is Fortran allocatable, it needs outer var
5040 reference. */
5041 else if (pass == 0
5042 && lang_hooks.decls.omp_private_outer_ref
5043 (OMP_CLAUSE_DECL (c)))
5044 lastprivate_firstprivate = true;
5045 break;
5046 case OMP_CLAUSE_ALIGNED:
5047 if (pass != 1)
5048 continue;
5049 var = OMP_CLAUSE_DECL (c);
5050 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5051 && !is_global_var (var))
5053 new_var = maybe_lookup_decl (var, ctx);
5054 if (new_var == NULL_TREE)
5055 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
5056 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5057 tree alarg = omp_clause_aligned_alignment (c);
5058 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5059 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5060 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5061 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5062 gimplify_and_add (x, ilist);
5064 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5065 && is_global_var (var))
5067 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5068 new_var = lookup_decl (var, ctx);
5069 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
5070 t = build_fold_addr_expr_loc (clause_loc, t);
5071 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5072 tree alarg = omp_clause_aligned_alignment (c);
5073 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5074 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5075 t = fold_convert_loc (clause_loc, ptype, t);
5076 x = create_tmp_var (ptype);
5077 t = build2 (MODIFY_EXPR, ptype, x, t);
5078 gimplify_and_add (t, ilist);
5079 t = build_simple_mem_ref_loc (clause_loc, x);
5080 SET_DECL_VALUE_EXPR (new_var, t);
5081 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5083 continue;
5084 case OMP_CLAUSE__CONDTEMP_:
5085 if (is_parallel_ctx (ctx)
5086 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5087 break;
5088 continue;
5089 default:
5090 continue;
5093 if (task_reduction_p != (pass >= 2))
5094 continue;
5096 allocator = NULL_TREE;
5097 allocate_ptr = NULL_TREE;
5098 new_var = var = OMP_CLAUSE_DECL (c);
5099 if ((c_kind == OMP_CLAUSE_REDUCTION
5100 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5101 && TREE_CODE (var) == MEM_REF)
5103 var = TREE_OPERAND (var, 0);
5104 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5105 var = TREE_OPERAND (var, 0);
5106 if (TREE_CODE (var) == INDIRECT_REF
5107 || TREE_CODE (var) == ADDR_EXPR)
5108 var = TREE_OPERAND (var, 0);
5109 if (is_variable_sized (var))
5111 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5112 var = DECL_VALUE_EXPR (var);
5113 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5114 var = TREE_OPERAND (var, 0);
5115 gcc_assert (DECL_P (var));
5117 new_var = var;
5119 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5121 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5122 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5124 else if (c_kind != OMP_CLAUSE_COPYIN)
5125 new_var = lookup_decl (var, ctx);
5127 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5129 if (pass != 0)
5130 continue;
5132 /* C/C++ array section reductions. */
5133 else if ((c_kind == OMP_CLAUSE_REDUCTION
5134 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5135 && var != OMP_CLAUSE_DECL (c))
5137 if (pass == 0)
5138 continue;
5140 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5141 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5143 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5145 tree b = TREE_OPERAND (orig_var, 1);
5146 if (is_omp_target (ctx->stmt))
5147 b = NULL_TREE;
5148 else
5149 b = maybe_lookup_decl (b, ctx);
5150 if (b == NULL)
5152 b = TREE_OPERAND (orig_var, 1);
5153 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5155 if (integer_zerop (bias))
5156 bias = b;
5157 else
5159 bias = fold_convert_loc (clause_loc,
5160 TREE_TYPE (b), bias);
5161 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5162 TREE_TYPE (b), b, bias);
5164 orig_var = TREE_OPERAND (orig_var, 0);
5166 if (pass == 2)
5168 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5169 if (is_global_var (out)
5170 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5171 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5172 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5173 != POINTER_TYPE)))
5174 x = var;
5175 else if (is_omp_target (ctx->stmt))
5176 x = out;
5177 else
5179 bool by_ref = use_pointer_for_field (var, NULL);
5180 x = build_receiver_ref (var, by_ref, ctx);
5181 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5182 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5183 == POINTER_TYPE))
5184 x = build_fold_addr_expr (x);
5186 if (TREE_CODE (orig_var) == INDIRECT_REF)
5187 x = build_simple_mem_ref (x);
5188 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5190 if (var == TREE_OPERAND (orig_var, 0))
5191 x = build_fold_addr_expr (x);
5193 bias = fold_convert (sizetype, bias);
5194 x = fold_convert (ptr_type_node, x);
5195 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5196 TREE_TYPE (x), x, bias);
5197 unsigned cnt = task_reduction_cnt - 1;
5198 if (!task_reduction_needs_orig_p)
5199 cnt += (task_reduction_cntorig_full
5200 - task_reduction_cntorig);
5201 else
5202 cnt = task_reduction_cntorig - 1;
5203 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5204 size_int (cnt), NULL_TREE, NULL_TREE);
5205 gimplify_assign (r, x, ilist);
5206 continue;
5209 if (TREE_CODE (orig_var) == INDIRECT_REF
5210 || TREE_CODE (orig_var) == ADDR_EXPR)
5211 orig_var = TREE_OPERAND (orig_var, 0);
5212 tree d = OMP_CLAUSE_DECL (c);
5213 tree type = TREE_TYPE (d);
5214 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5215 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5216 tree sz = v;
5217 const char *name = get_name (orig_var);
5218 if (pass != 3 && !TREE_CONSTANT (v))
5220 tree t;
5221 if (is_omp_target (ctx->stmt))
5222 t = NULL_TREE;
5223 else
5224 t = maybe_lookup_decl (v, ctx);
5225 if (t)
5226 v = t;
5227 else
5228 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5229 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5230 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5231 TREE_TYPE (v), v,
5232 build_int_cst (TREE_TYPE (v), 1));
5233 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5234 TREE_TYPE (v), t,
5235 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5237 if (pass == 3)
5239 tree xv = create_tmp_var (ptr_type_node);
5240 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5242 unsigned cnt = task_reduction_cnt - 1;
5243 if (!task_reduction_needs_orig_p)
5244 cnt += (task_reduction_cntorig_full
5245 - task_reduction_cntorig);
5246 else
5247 cnt = task_reduction_cntorig - 1;
5248 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5249 size_int (cnt), NULL_TREE, NULL_TREE);
5251 gimple *g = gimple_build_assign (xv, x);
5252 gimple_seq_add_stmt (ilist, g);
5254 else
5256 unsigned int idx = *ctx->task_reduction_map->get (c);
5257 tree off;
5258 if (ctx->task_reductions[1 + idx])
5259 off = fold_convert (sizetype,
5260 ctx->task_reductions[1 + idx]);
5261 else
5262 off = task_reduction_read (ilist, tskred_temp, sizetype,
5263 7 + 3 * idx + 1);
5264 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5265 tskred_base, off);
5266 gimple_seq_add_stmt (ilist, g);
5268 x = fold_convert (build_pointer_type (boolean_type_node),
5269 xv);
5270 if (TREE_CONSTANT (v))
5271 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5272 TYPE_SIZE_UNIT (type));
5273 else
5275 tree t;
5276 if (is_omp_target (ctx->stmt))
5277 t = NULL_TREE;
5278 else
5279 t = maybe_lookup_decl (v, ctx);
5280 if (t)
5281 v = t;
5282 else
5283 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5284 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5285 fb_rvalue);
5286 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5287 TREE_TYPE (v), v,
5288 build_int_cst (TREE_TYPE (v), 1));
5289 t = fold_build2_loc (clause_loc, MULT_EXPR,
5290 TREE_TYPE (v), t,
5291 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5292 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5294 cond = create_tmp_var (TREE_TYPE (x));
5295 gimplify_assign (cond, x, ilist);
5296 x = xv;
5298 else if (lower_private_allocate (var, type, allocator,
5299 allocate_ptr, ilist, ctx,
5300 true,
5301 TREE_CONSTANT (v)
5302 ? TYPE_SIZE_UNIT (type)
5303 : sz))
5304 x = allocate_ptr;
5305 else if (TREE_CONSTANT (v))
5307 x = create_tmp_var_raw (type, name);
5308 gimple_add_tmp_var (x);
5309 TREE_ADDRESSABLE (x) = 1;
5310 x = build_fold_addr_expr_loc (clause_loc, x);
5312 else
5314 tree atmp
5315 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5316 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5317 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5320 tree ptype = build_pointer_type (TREE_TYPE (type));
5321 x = fold_convert_loc (clause_loc, ptype, x);
5322 tree y = create_tmp_var (ptype, name);
5323 gimplify_assign (y, x, ilist);
5324 x = y;
5325 tree yb = y;
5327 if (!integer_zerop (bias))
5329 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5330 bias);
5331 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5333 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5334 pointer_sized_int_node, yb, bias);
5335 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5336 yb = create_tmp_var (ptype, name);
5337 gimplify_assign (yb, x, ilist);
5338 x = yb;
5341 d = TREE_OPERAND (d, 0);
5342 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5343 d = TREE_OPERAND (d, 0);
5344 if (TREE_CODE (d) == ADDR_EXPR)
5346 if (orig_var != var)
5348 gcc_assert (is_variable_sized (orig_var));
5349 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5351 gimplify_assign (new_var, x, ilist);
5352 tree new_orig_var = lookup_decl (orig_var, ctx);
5353 tree t = build_fold_indirect_ref (new_var);
5354 DECL_IGNORED_P (new_var) = 0;
5355 TREE_THIS_NOTRAP (t) = 1;
5356 SET_DECL_VALUE_EXPR (new_orig_var, t);
5357 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5359 else
5361 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5362 build_int_cst (ptype, 0));
5363 SET_DECL_VALUE_EXPR (new_var, x);
5364 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5367 else
5369 gcc_assert (orig_var == var);
5370 if (TREE_CODE (d) == INDIRECT_REF)
5372 x = create_tmp_var (ptype, name);
5373 TREE_ADDRESSABLE (x) = 1;
5374 gimplify_assign (x, yb, ilist);
5375 x = build_fold_addr_expr_loc (clause_loc, x);
5377 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5378 gimplify_assign (new_var, x, ilist);
5380 /* GOMP_taskgroup_reduction_register memsets the whole
5381 array to zero. If the initializer is zero, we don't
5382 need to initialize it again, just mark it as ever
5383 used unconditionally, i.e. cond = true. */
5384 if (cond
5385 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5386 && initializer_zerop (omp_reduction_init (c,
5387 TREE_TYPE (type))))
5389 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5390 boolean_true_node);
5391 gimple_seq_add_stmt (ilist, g);
5392 continue;
5394 tree end = create_artificial_label (UNKNOWN_LOCATION);
5395 if (cond)
5397 gimple *g;
5398 if (!is_parallel_ctx (ctx))
5400 tree condv = create_tmp_var (boolean_type_node);
5401 g = gimple_build_assign (condv,
5402 build_simple_mem_ref (cond));
5403 gimple_seq_add_stmt (ilist, g);
5404 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5405 g = gimple_build_cond (NE_EXPR, condv,
5406 boolean_false_node, end, lab1);
5407 gimple_seq_add_stmt (ilist, g);
5408 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5410 g = gimple_build_assign (build_simple_mem_ref (cond),
5411 boolean_true_node);
5412 gimple_seq_add_stmt (ilist, g);
5415 tree y1 = create_tmp_var (ptype);
5416 gimplify_assign (y1, y, ilist);
5417 tree i2 = NULL_TREE, y2 = NULL_TREE;
5418 tree body2 = NULL_TREE, end2 = NULL_TREE;
5419 tree y3 = NULL_TREE, y4 = NULL_TREE;
5420 if (task_reduction_needs_orig_p)
5422 y3 = create_tmp_var (ptype);
5423 tree ref;
5424 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5425 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5426 size_int (task_reduction_cnt_full
5427 + task_reduction_cntorig - 1),
5428 NULL_TREE, NULL_TREE);
5429 else
5431 unsigned int idx = *ctx->task_reduction_map->get (c);
5432 ref = task_reduction_read (ilist, tskred_temp, ptype,
5433 7 + 3 * idx);
5435 gimplify_assign (y3, ref, ilist);
5437 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5439 if (pass != 3)
5441 y2 = create_tmp_var (ptype);
5442 gimplify_assign (y2, y, ilist);
5444 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5446 tree ref = build_outer_var_ref (var, ctx);
5447 /* For ref build_outer_var_ref already performs this. */
5448 if (TREE_CODE (d) == INDIRECT_REF)
5449 gcc_assert (omp_privatize_by_reference (var));
5450 else if (TREE_CODE (d) == ADDR_EXPR)
5451 ref = build_fold_addr_expr (ref);
5452 else if (omp_privatize_by_reference (var))
5453 ref = build_fold_addr_expr (ref);
5454 ref = fold_convert_loc (clause_loc, ptype, ref);
5455 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5456 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5458 y3 = create_tmp_var (ptype);
5459 gimplify_assign (y3, unshare_expr (ref), ilist);
5461 if (is_simd)
5463 y4 = create_tmp_var (ptype);
5464 gimplify_assign (y4, ref, dlist);
5468 tree i = create_tmp_var (TREE_TYPE (v));
5469 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5470 tree body = create_artificial_label (UNKNOWN_LOCATION);
5471 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5472 if (y2)
5474 i2 = create_tmp_var (TREE_TYPE (v));
5475 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5476 body2 = create_artificial_label (UNKNOWN_LOCATION);
5477 end2 = create_artificial_label (UNKNOWN_LOCATION);
5478 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5480 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5482 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5483 tree decl_placeholder
5484 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5485 SET_DECL_VALUE_EXPR (decl_placeholder,
5486 build_simple_mem_ref (y1));
5487 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5488 SET_DECL_VALUE_EXPR (placeholder,
5489 y3 ? build_simple_mem_ref (y3)
5490 : error_mark_node);
5491 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5492 x = lang_hooks.decls.omp_clause_default_ctor
5493 (c, build_simple_mem_ref (y1),
5494 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5495 if (x)
5496 gimplify_and_add (x, ilist);
5497 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5499 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5500 lower_omp (&tseq, ctx);
5501 gimple_seq_add_seq (ilist, tseq);
5503 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5504 if (is_simd)
5506 SET_DECL_VALUE_EXPR (decl_placeholder,
5507 build_simple_mem_ref (y2));
5508 SET_DECL_VALUE_EXPR (placeholder,
5509 build_simple_mem_ref (y4));
5510 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5511 lower_omp (&tseq, ctx);
5512 gimple_seq_add_seq (dlist, tseq);
5513 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5515 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5516 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5517 if (y2)
5519 x = lang_hooks.decls.omp_clause_dtor
5520 (c, build_simple_mem_ref (y2));
5521 if (x)
5522 gimplify_and_add (x, dlist);
5525 else
5527 x = omp_reduction_init (c, TREE_TYPE (type));
5528 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5530 /* reduction(-:var) sums up the partial results, so it
5531 acts identically to reduction(+:var). */
5532 if (code == MINUS_EXPR)
5533 code = PLUS_EXPR;
5535 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5536 if (is_simd)
5538 x = build2 (code, TREE_TYPE (type),
5539 build_simple_mem_ref (y4),
5540 build_simple_mem_ref (y2));
5541 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5544 gimple *g
5545 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5546 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5547 gimple_seq_add_stmt (ilist, g);
5548 if (y3)
5550 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5551 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5552 gimple_seq_add_stmt (ilist, g);
5554 g = gimple_build_assign (i, PLUS_EXPR, i,
5555 build_int_cst (TREE_TYPE (i), 1));
5556 gimple_seq_add_stmt (ilist, g);
5557 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5558 gimple_seq_add_stmt (ilist, g);
5559 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5560 if (y2)
5562 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5563 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5564 gimple_seq_add_stmt (dlist, g);
5565 if (y4)
5567 g = gimple_build_assign
5568 (y4, POINTER_PLUS_EXPR, y4,
5569 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5570 gimple_seq_add_stmt (dlist, g);
5572 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5573 build_int_cst (TREE_TYPE (i2), 1));
5574 gimple_seq_add_stmt (dlist, g);
5575 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5576 gimple_seq_add_stmt (dlist, g);
5577 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5579 if (allocator)
5581 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5582 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5583 gimple_seq_add_stmt (dlist, g);
5585 continue;
5587 else if (pass == 2)
5589 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5590 if (is_global_var (out))
5591 x = var;
5592 else if (is_omp_target (ctx->stmt))
5593 x = out;
5594 else
5596 bool by_ref = use_pointer_for_field (var, ctx);
5597 x = build_receiver_ref (var, by_ref, ctx);
5599 if (!omp_privatize_by_reference (var))
5600 x = build_fold_addr_expr (x);
5601 x = fold_convert (ptr_type_node, x);
5602 unsigned cnt = task_reduction_cnt - 1;
5603 if (!task_reduction_needs_orig_p)
5604 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5605 else
5606 cnt = task_reduction_cntorig - 1;
5607 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5608 size_int (cnt), NULL_TREE, NULL_TREE);
5609 gimplify_assign (r, x, ilist);
5610 continue;
5612 else if (pass == 3)
5614 tree type = TREE_TYPE (new_var);
5615 if (!omp_privatize_by_reference (var))
5616 type = build_pointer_type (type);
5617 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5619 unsigned cnt = task_reduction_cnt - 1;
5620 if (!task_reduction_needs_orig_p)
5621 cnt += (task_reduction_cntorig_full
5622 - task_reduction_cntorig);
5623 else
5624 cnt = task_reduction_cntorig - 1;
5625 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5626 size_int (cnt), NULL_TREE, NULL_TREE);
5628 else
5630 unsigned int idx = *ctx->task_reduction_map->get (c);
5631 tree off;
5632 if (ctx->task_reductions[1 + idx])
5633 off = fold_convert (sizetype,
5634 ctx->task_reductions[1 + idx]);
5635 else
5636 off = task_reduction_read (ilist, tskred_temp, sizetype,
5637 7 + 3 * idx + 1);
5638 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5639 tskred_base, off);
5641 x = fold_convert (type, x);
5642 tree t;
5643 if (omp_privatize_by_reference (var))
5645 gimplify_assign (new_var, x, ilist);
5646 t = new_var;
5647 new_var = build_simple_mem_ref (new_var);
5649 else
5651 t = create_tmp_var (type);
5652 gimplify_assign (t, x, ilist);
5653 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5654 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5656 t = fold_convert (build_pointer_type (boolean_type_node), t);
5657 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5658 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5659 cond = create_tmp_var (TREE_TYPE (t));
5660 gimplify_assign (cond, t, ilist);
5662 else if (is_variable_sized (var))
5664 /* For variable sized types, we need to allocate the
5665 actual storage here. Call alloca and store the
5666 result in the pointer decl that we created elsewhere. */
5667 if (pass == 0)
5668 continue;
5670 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5672 tree tmp;
5674 ptr = DECL_VALUE_EXPR (new_var);
5675 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5676 ptr = TREE_OPERAND (ptr, 0);
5677 gcc_assert (DECL_P (ptr));
5678 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5680 if (lower_private_allocate (var, new_var, allocator,
5681 allocate_ptr, ilist, ctx,
5682 false, x))
5683 tmp = allocate_ptr;
5684 else
5686 /* void *tmp = __builtin_alloca */
5687 tree atmp
5688 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5689 gcall *stmt
5690 = gimple_build_call (atmp, 2, x,
5691 size_int (DECL_ALIGN (var)));
5692 cfun->calls_alloca = 1;
5693 tmp = create_tmp_var_raw (ptr_type_node);
5694 gimple_add_tmp_var (tmp);
5695 gimple_call_set_lhs (stmt, tmp);
5697 gimple_seq_add_stmt (ilist, stmt);
5700 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5701 gimplify_assign (ptr, x, ilist);
5704 else if (omp_privatize_by_reference (var)
5705 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5706 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5708 /* For references that are being privatized for Fortran,
5709 allocate new backing storage for the new pointer
5710 variable. This allows us to avoid changing all the
5711 code that expects a pointer to something that expects
5712 a direct variable. */
5713 if (pass == 0)
5714 continue;
5716 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5717 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5719 x = build_receiver_ref (var, false, ctx);
5720 if (ctx->allocate_map)
5721 if (tree *allocatep = ctx->allocate_map->get (var))
5723 allocator = *allocatep;
5724 if (TREE_CODE (allocator) == TREE_LIST)
5725 allocator = TREE_PURPOSE (allocator);
5726 if (TREE_CODE (allocator) != INTEGER_CST)
5727 allocator = build_outer_var_ref (allocator, ctx);
5728 allocator = fold_convert (pointer_sized_int_node,
5729 allocator);
5730 allocate_ptr = unshare_expr (x);
5732 if (allocator == NULL_TREE)
5733 x = build_fold_addr_expr_loc (clause_loc, x);
5735 else if (lower_private_allocate (var, new_var, allocator,
5736 allocate_ptr,
5737 ilist, ctx, true, x))
5738 x = allocate_ptr;
5739 else if (TREE_CONSTANT (x))
5741 /* For reduction in SIMD loop, defer adding the
5742 initialization of the reference, because if we decide
5743 to use SIMD array for it, the initilization could cause
5744 expansion ICE. Ditto for other privatization clauses. */
5745 if (is_simd)
5746 x = NULL_TREE;
5747 else
5749 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5750 get_name (var));
5751 gimple_add_tmp_var (x);
5752 TREE_ADDRESSABLE (x) = 1;
5753 x = build_fold_addr_expr_loc (clause_loc, x);
5756 else
5758 tree atmp
5759 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5760 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5761 tree al = size_int (TYPE_ALIGN (rtype));
5762 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5765 if (x)
5767 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5768 gimplify_assign (new_var, x, ilist);
5771 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5773 else if ((c_kind == OMP_CLAUSE_REDUCTION
5774 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5775 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5777 if (pass == 0)
5778 continue;
5780 else if (pass != 0)
5781 continue;
5783 switch (OMP_CLAUSE_CODE (c))
5785 case OMP_CLAUSE_SHARED:
5786 /* Ignore shared directives in teams construct inside
5787 target construct. */
5788 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5789 && !is_host_teams_ctx (ctx))
5790 continue;
5791 /* Shared global vars are just accessed directly. */
5792 if (is_global_var (new_var))
5793 break;
5794 /* For taskloop firstprivate/lastprivate, represented
5795 as firstprivate and shared clause on the task, new_var
5796 is the firstprivate var. */
5797 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5798 break;
5799 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5800 needs to be delayed until after fixup_child_record_type so
5801 that we get the correct type during the dereference. */
5802 by_ref = use_pointer_for_field (var, ctx);
5803 x = build_receiver_ref (var, by_ref, ctx);
5804 SET_DECL_VALUE_EXPR (new_var, x);
5805 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5807 /* ??? If VAR is not passed by reference, and the variable
5808 hasn't been initialized yet, then we'll get a warning for
5809 the store into the omp_data_s structure. Ideally, we'd be
5810 able to notice this and not store anything at all, but
5811 we're generating code too early. Suppress the warning. */
5812 if (!by_ref)
5813 suppress_warning (var, OPT_Wuninitialized);
5814 break;
5816 case OMP_CLAUSE__CONDTEMP_:
5817 if (is_parallel_ctx (ctx))
5819 x = build_receiver_ref (var, false, ctx);
5820 SET_DECL_VALUE_EXPR (new_var, x);
5821 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5823 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5825 x = build_zero_cst (TREE_TYPE (var));
5826 goto do_private;
5828 break;
5830 case OMP_CLAUSE_LASTPRIVATE:
5831 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5832 break;
5833 /* FALLTHRU */
5835 case OMP_CLAUSE_PRIVATE:
5836 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5837 x = build_outer_var_ref (var, ctx);
5838 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5840 if (is_task_ctx (ctx))
5841 x = build_receiver_ref (var, false, ctx);
5842 else
5843 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5845 else
5846 x = NULL;
5847 do_private:
5848 tree nx;
5849 bool copy_ctor;
5850 copy_ctor = false;
5851 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5852 ilist, ctx, false, NULL_TREE);
5853 nx = unshare_expr (new_var);
5854 if (is_simd
5855 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5856 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5857 copy_ctor = true;
5858 if (copy_ctor)
5859 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5860 else
5861 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5862 if (is_simd)
5864 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5865 if ((TREE_ADDRESSABLE (new_var) || nx || y
5866 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5867 && (gimple_omp_for_collapse (ctx->stmt) != 1
5868 || (gimple_omp_for_index (ctx->stmt, 0)
5869 != new_var)))
5870 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5871 || omp_privatize_by_reference (var))
5872 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5873 ivar, lvar))
5875 if (omp_privatize_by_reference (var))
5877 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5878 tree new_vard = TREE_OPERAND (new_var, 0);
5879 gcc_assert (DECL_P (new_vard));
5880 SET_DECL_VALUE_EXPR (new_vard,
5881 build_fold_addr_expr (lvar));
5882 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5885 if (nx)
5887 tree iv = unshare_expr (ivar);
5888 if (copy_ctor)
5889 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5891 else
5892 x = lang_hooks.decls.omp_clause_default_ctor (c,
5896 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5898 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5899 unshare_expr (ivar), x);
5900 nx = x;
5902 if (nx && x)
5903 gimplify_and_add (x, &llist[0]);
5904 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5905 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5907 tree v = new_var;
5908 if (!DECL_P (v))
5910 gcc_assert (TREE_CODE (v) == MEM_REF);
5911 v = TREE_OPERAND (v, 0);
5912 gcc_assert (DECL_P (v));
5914 v = *ctx->lastprivate_conditional_map->get (v);
5915 tree t = create_tmp_var (TREE_TYPE (v));
5916 tree z = build_zero_cst (TREE_TYPE (v));
5917 tree orig_v
5918 = build_outer_var_ref (var, ctx,
5919 OMP_CLAUSE_LASTPRIVATE);
5920 gimple_seq_add_stmt (dlist,
5921 gimple_build_assign (t, z));
5922 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5923 tree civar = DECL_VALUE_EXPR (v);
5924 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5925 civar = unshare_expr (civar);
5926 TREE_OPERAND (civar, 1) = sctx.idx;
5927 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5928 unshare_expr (civar));
5929 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5930 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5931 orig_v, unshare_expr (ivar)));
5932 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5933 civar);
5934 x = build3 (COND_EXPR, void_type_node, cond, x,
5935 void_node);
5936 gimple_seq tseq = NULL;
5937 gimplify_and_add (x, &tseq);
5938 if (ctx->outer)
5939 lower_omp (&tseq, ctx->outer);
5940 gimple_seq_add_seq (&llist[1], tseq);
5942 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5943 && ctx->for_simd_scan_phase)
5945 x = unshare_expr (ivar);
5946 tree orig_v
5947 = build_outer_var_ref (var, ctx,
5948 OMP_CLAUSE_LASTPRIVATE);
5949 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5950 orig_v);
5951 gimplify_and_add (x, &llist[0]);
5953 if (y)
5955 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5956 if (y)
5957 gimplify_and_add (y, &llist[1]);
5959 break;
5961 if (omp_privatize_by_reference (var))
5963 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5964 tree new_vard = TREE_OPERAND (new_var, 0);
5965 gcc_assert (DECL_P (new_vard));
5966 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5967 x = TYPE_SIZE_UNIT (type);
5968 if (TREE_CONSTANT (x))
5970 x = create_tmp_var_raw (type, get_name (var));
5971 gimple_add_tmp_var (x);
5972 TREE_ADDRESSABLE (x) = 1;
5973 x = build_fold_addr_expr_loc (clause_loc, x);
5974 x = fold_convert_loc (clause_loc,
5975 TREE_TYPE (new_vard), x);
5976 gimplify_assign (new_vard, x, ilist);
5980 if (nx)
5981 gimplify_and_add (nx, ilist);
5982 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5983 && is_simd
5984 && ctx->for_simd_scan_phase)
5986 tree orig_v = build_outer_var_ref (var, ctx,
5987 OMP_CLAUSE_LASTPRIVATE);
5988 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5989 orig_v);
5990 gimplify_and_add (x, ilist);
5992 /* FALLTHRU */
5994 do_dtor:
5995 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5996 if (x)
5997 gimplify_and_add (x, dlist);
5998 if (allocator)
6000 if (!is_gimple_val (allocator))
6002 tree avar = create_tmp_var (TREE_TYPE (allocator));
6003 gimplify_assign (avar, allocator, dlist);
6004 allocator = avar;
6006 if (!is_gimple_val (allocate_ptr))
6008 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
6009 gimplify_assign (apvar, allocate_ptr, dlist);
6010 allocate_ptr = apvar;
6012 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
6013 gimple *g
6014 = gimple_build_call (f, 2, allocate_ptr, allocator);
6015 gimple_seq_add_stmt (dlist, g);
6017 break;
6019 case OMP_CLAUSE_LINEAR:
6020 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6021 goto do_firstprivate;
6022 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6023 x = NULL;
6024 else
6025 x = build_outer_var_ref (var, ctx);
6026 goto do_private;
6028 case OMP_CLAUSE_FIRSTPRIVATE:
6029 if (is_task_ctx (ctx))
6031 if ((omp_privatize_by_reference (var)
6032 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
6033 || is_variable_sized (var))
6034 goto do_dtor;
6035 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
6036 ctx))
6037 || use_pointer_for_field (var, NULL))
6039 x = build_receiver_ref (var, false, ctx);
6040 if (ctx->allocate_map)
6041 if (tree *allocatep = ctx->allocate_map->get (var))
6043 allocator = *allocatep;
6044 if (TREE_CODE (allocator) == TREE_LIST)
6045 allocator = TREE_PURPOSE (allocator);
6046 if (TREE_CODE (allocator) != INTEGER_CST)
6047 allocator = build_outer_var_ref (allocator, ctx);
6048 allocator = fold_convert (pointer_sized_int_node,
6049 allocator);
6050 allocate_ptr = unshare_expr (x);
6051 x = build_simple_mem_ref (x);
6052 TREE_THIS_NOTRAP (x) = 1;
6054 SET_DECL_VALUE_EXPR (new_var, x);
6055 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6056 goto do_dtor;
6059 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6060 && omp_privatize_by_reference (var))
6062 x = build_outer_var_ref (var, ctx);
6063 gcc_assert (TREE_CODE (x) == MEM_REF
6064 && integer_zerop (TREE_OPERAND (x, 1)));
6065 x = TREE_OPERAND (x, 0);
6066 x = lang_hooks.decls.omp_clause_copy_ctor
6067 (c, unshare_expr (new_var), x);
6068 gimplify_and_add (x, ilist);
6069 goto do_dtor;
6071 do_firstprivate:
6072 lower_private_allocate (var, new_var, allocator, allocate_ptr,
6073 ilist, ctx, false, NULL_TREE);
6074 x = build_outer_var_ref (var, ctx);
6075 if (is_simd)
6077 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6078 && gimple_omp_for_combined_into_p (ctx->stmt))
6080 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6081 if (DECL_P (t))
6082 t = build_outer_var_ref (t, ctx);
6083 tree stept = TREE_TYPE (t);
6084 tree ct = omp_find_clause (clauses,
6085 OMP_CLAUSE__LOOPTEMP_);
6086 gcc_assert (ct);
6087 tree l = OMP_CLAUSE_DECL (ct);
6088 tree n1 = fd->loop.n1;
6089 tree step = fd->loop.step;
6090 tree itype = TREE_TYPE (l);
6091 if (POINTER_TYPE_P (itype))
6092 itype = signed_type_for (itype);
6093 l = fold_build2 (MINUS_EXPR, itype, l, n1);
6094 if (TYPE_UNSIGNED (itype)
6095 && fd->loop.cond_code == GT_EXPR)
6096 l = fold_build2 (TRUNC_DIV_EXPR, itype,
6097 fold_build1 (NEGATE_EXPR, itype, l),
6098 fold_build1 (NEGATE_EXPR,
6099 itype, step));
6100 else
6101 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6102 t = fold_build2 (MULT_EXPR, stept,
6103 fold_convert (stept, l), t);
6105 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6107 if (omp_privatize_by_reference (var))
6109 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6110 tree new_vard = TREE_OPERAND (new_var, 0);
6111 gcc_assert (DECL_P (new_vard));
6112 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6113 nx = TYPE_SIZE_UNIT (type);
6114 if (TREE_CONSTANT (nx))
6116 nx = create_tmp_var_raw (type,
6117 get_name (var));
6118 gimple_add_tmp_var (nx);
6119 TREE_ADDRESSABLE (nx) = 1;
6120 nx = build_fold_addr_expr_loc (clause_loc,
6121 nx);
6122 nx = fold_convert_loc (clause_loc,
6123 TREE_TYPE (new_vard),
6124 nx);
6125 gimplify_assign (new_vard, nx, ilist);
6129 x = lang_hooks.decls.omp_clause_linear_ctor
6130 (c, new_var, x, t);
6131 gimplify_and_add (x, ilist);
6132 goto do_dtor;
6135 if (POINTER_TYPE_P (TREE_TYPE (x)))
6136 x = fold_build_pointer_plus (x, t);
6137 else
6138 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x,
6139 fold_convert (TREE_TYPE (x), t));
6142 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6143 || TREE_ADDRESSABLE (new_var)
6144 || omp_privatize_by_reference (var))
6145 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6146 ivar, lvar))
6148 if (omp_privatize_by_reference (var))
6150 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6151 tree new_vard = TREE_OPERAND (new_var, 0);
6152 gcc_assert (DECL_P (new_vard));
6153 SET_DECL_VALUE_EXPR (new_vard,
6154 build_fold_addr_expr (lvar));
6155 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6157 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6159 tree iv = create_tmp_var (TREE_TYPE (new_var));
6160 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6161 gimplify_and_add (x, ilist);
6162 gimple_stmt_iterator gsi
6163 = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6164 gassign *g
6165 = gimple_build_assign (unshare_expr (lvar), iv);
6166 gsi_insert_before_without_update (&gsi, g,
6167 GSI_SAME_STMT);
6168 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6169 enum tree_code code = PLUS_EXPR;
6170 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6171 code = POINTER_PLUS_EXPR;
6172 g = gimple_build_assign (iv, code, iv, t);
6173 gsi_insert_before_without_update (&gsi, g,
6174 GSI_SAME_STMT);
6175 break;
6177 x = lang_hooks.decls.omp_clause_copy_ctor
6178 (c, unshare_expr (ivar), x);
6179 gimplify_and_add (x, &llist[0]);
6180 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6181 if (x)
6182 gimplify_and_add (x, &llist[1]);
6183 break;
6185 if (omp_privatize_by_reference (var))
6187 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6188 tree new_vard = TREE_OPERAND (new_var, 0);
6189 gcc_assert (DECL_P (new_vard));
6190 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6191 nx = TYPE_SIZE_UNIT (type);
6192 if (TREE_CONSTANT (nx))
6194 nx = create_tmp_var_raw (type, get_name (var));
6195 gimple_add_tmp_var (nx);
6196 TREE_ADDRESSABLE (nx) = 1;
6197 nx = build_fold_addr_expr_loc (clause_loc, nx);
6198 nx = fold_convert_loc (clause_loc,
6199 TREE_TYPE (new_vard), nx);
6200 gimplify_assign (new_vard, nx, ilist);
6204 x = lang_hooks.decls.omp_clause_copy_ctor
6205 (c, unshare_expr (new_var), x);
6206 gimplify_and_add (x, ilist);
6207 goto do_dtor;
6209 case OMP_CLAUSE__LOOPTEMP_:
6210 case OMP_CLAUSE__REDUCTEMP_:
6211 gcc_assert (is_taskreg_ctx (ctx));
6212 x = build_outer_var_ref (var, ctx);
6213 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6214 gimplify_and_add (x, ilist);
6215 break;
6217 case OMP_CLAUSE_COPYIN:
6218 by_ref = use_pointer_for_field (var, NULL);
6219 x = build_receiver_ref (var, by_ref, ctx);
6220 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6221 append_to_statement_list (x, &copyin_seq);
6222 copyin_by_ref |= by_ref;
6223 break;
6225 case OMP_CLAUSE_REDUCTION:
6226 case OMP_CLAUSE_IN_REDUCTION:
6227 /* OpenACC reductions are initialized using the
6228 GOACC_REDUCTION internal function. */
6229 if (is_gimple_omp_oacc (ctx->stmt))
6230 break;
6231 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6233 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6234 gimple *tseq;
6235 tree ptype = TREE_TYPE (placeholder);
6236 if (cond)
6238 x = error_mark_node;
6239 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6240 && !task_reduction_needs_orig_p)
6241 x = var;
6242 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6244 tree pptype = build_pointer_type (ptype);
6245 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6246 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6247 size_int (task_reduction_cnt_full
6248 + task_reduction_cntorig - 1),
6249 NULL_TREE, NULL_TREE);
6250 else
6252 unsigned int idx
6253 = *ctx->task_reduction_map->get (c);
6254 x = task_reduction_read (ilist, tskred_temp,
6255 pptype, 7 + 3 * idx);
6257 x = fold_convert (pptype, x);
6258 x = build_simple_mem_ref (x);
6261 else
6263 lower_private_allocate (var, new_var, allocator,
6264 allocate_ptr, ilist, ctx, false,
6265 NULL_TREE);
6266 x = build_outer_var_ref (var, ctx);
6268 if (omp_privatize_by_reference (var)
6269 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6270 x = build_fold_addr_expr_loc (clause_loc, x);
6272 SET_DECL_VALUE_EXPR (placeholder, x);
6273 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6274 tree new_vard = new_var;
6275 if (omp_privatize_by_reference (var))
6277 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6278 new_vard = TREE_OPERAND (new_var, 0);
6279 gcc_assert (DECL_P (new_vard));
6281 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6282 if (is_simd
6283 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6284 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6285 rvarp = &rvar;
6286 if (is_simd
6287 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6288 ivar, lvar, rvarp,
6289 &rvar2))
6291 if (new_vard == new_var)
6293 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6294 SET_DECL_VALUE_EXPR (new_var, ivar);
6296 else
6298 SET_DECL_VALUE_EXPR (new_vard,
6299 build_fold_addr_expr (ivar));
6300 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6302 x = lang_hooks.decls.omp_clause_default_ctor
6303 (c, unshare_expr (ivar),
6304 build_outer_var_ref (var, ctx));
6305 if (rvarp && ctx->for_simd_scan_phase)
6307 if (x)
6308 gimplify_and_add (x, &llist[0]);
6309 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6310 if (x)
6311 gimplify_and_add (x, &llist[1]);
6312 break;
6314 else if (rvarp)
6316 if (x)
6318 gimplify_and_add (x, &llist[0]);
6320 tree ivar2 = unshare_expr (lvar);
6321 TREE_OPERAND (ivar2, 1) = sctx.idx;
6322 x = lang_hooks.decls.omp_clause_default_ctor
6323 (c, ivar2, build_outer_var_ref (var, ctx));
6324 gimplify_and_add (x, &llist[0]);
6326 if (rvar2)
6328 x = lang_hooks.decls.omp_clause_default_ctor
6329 (c, unshare_expr (rvar2),
6330 build_outer_var_ref (var, ctx));
6331 gimplify_and_add (x, &llist[0]);
6334 /* For types that need construction, add another
6335 private var which will be default constructed
6336 and optionally initialized with
6337 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6338 loop we want to assign this value instead of
6339 constructing and destructing it in each
6340 iteration. */
6341 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6342 gimple_add_tmp_var (nv);
6343 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6344 ? rvar2
6345 : ivar, 0),
6346 nv);
6347 x = lang_hooks.decls.omp_clause_default_ctor
6348 (c, nv, build_outer_var_ref (var, ctx));
6349 gimplify_and_add (x, ilist);
6351 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6353 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6354 x = DECL_VALUE_EXPR (new_vard);
6355 tree vexpr = nv;
6356 if (new_vard != new_var)
6357 vexpr = build_fold_addr_expr (nv);
6358 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6359 lower_omp (&tseq, ctx);
6360 SET_DECL_VALUE_EXPR (new_vard, x);
6361 gimple_seq_add_seq (ilist, tseq);
6362 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6365 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6366 if (x)
6367 gimplify_and_add (x, dlist);
6370 tree ref = build_outer_var_ref (var, ctx);
6371 x = unshare_expr (ivar);
6372 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6373 ref);
6374 gimplify_and_add (x, &llist[0]);
6376 ref = build_outer_var_ref (var, ctx);
6377 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6378 rvar);
6379 gimplify_and_add (x, &llist[3]);
6381 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6382 if (new_vard == new_var)
6383 SET_DECL_VALUE_EXPR (new_var, lvar);
6384 else
6385 SET_DECL_VALUE_EXPR (new_vard,
6386 build_fold_addr_expr (lvar));
6388 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6389 if (x)
6390 gimplify_and_add (x, &llist[1]);
6392 tree ivar2 = unshare_expr (lvar);
6393 TREE_OPERAND (ivar2, 1) = sctx.idx;
6394 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6395 if (x)
6396 gimplify_and_add (x, &llist[1]);
6398 if (rvar2)
6400 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6401 if (x)
6402 gimplify_and_add (x, &llist[1]);
6404 break;
6406 if (x)
6407 gimplify_and_add (x, &llist[0]);
6408 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6410 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6411 lower_omp (&tseq, ctx);
6412 gimple_seq_add_seq (&llist[0], tseq);
6414 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6415 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6416 lower_omp (&tseq, ctx);
6417 gimple_seq_add_seq (&llist[1], tseq);
6418 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6419 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6420 if (new_vard == new_var)
6421 SET_DECL_VALUE_EXPR (new_var, lvar);
6422 else
6423 SET_DECL_VALUE_EXPR (new_vard,
6424 build_fold_addr_expr (lvar));
6425 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6426 if (x)
6427 gimplify_and_add (x, &llist[1]);
6428 break;
6430 /* If this is a reference to constant size reduction var
6431 with placeholder, we haven't emitted the initializer
6432 for it because it is undesirable if SIMD arrays are used.
6433 But if they aren't used, we need to emit the deferred
6434 initialization now. */
6435 else if (omp_privatize_by_reference (var) && is_simd)
6436 handle_simd_reference (clause_loc, new_vard, ilist);
6438 tree lab2 = NULL_TREE;
6439 if (cond)
6441 gimple *g;
6442 if (!is_parallel_ctx (ctx))
6444 tree condv = create_tmp_var (boolean_type_node);
6445 tree m = build_simple_mem_ref (cond);
6446 g = gimple_build_assign (condv, m);
6447 gimple_seq_add_stmt (ilist, g);
6448 tree lab1
6449 = create_artificial_label (UNKNOWN_LOCATION);
6450 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6451 g = gimple_build_cond (NE_EXPR, condv,
6452 boolean_false_node,
6453 lab2, lab1);
6454 gimple_seq_add_stmt (ilist, g);
6455 gimple_seq_add_stmt (ilist,
6456 gimple_build_label (lab1));
6458 g = gimple_build_assign (build_simple_mem_ref (cond),
6459 boolean_true_node);
6460 gimple_seq_add_stmt (ilist, g);
6462 x = lang_hooks.decls.omp_clause_default_ctor
6463 (c, unshare_expr (new_var),
6464 cond ? NULL_TREE
6465 : build_outer_var_ref (var, ctx));
6466 if (x)
6467 gimplify_and_add (x, ilist);
6469 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6470 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6472 if (ctx->for_simd_scan_phase)
6473 goto do_dtor;
6474 if (x || (!is_simd
6475 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6477 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6478 gimple_add_tmp_var (nv);
6479 ctx->cb.decl_map->put (new_vard, nv);
6480 x = lang_hooks.decls.omp_clause_default_ctor
6481 (c, nv, build_outer_var_ref (var, ctx));
6482 if (x)
6483 gimplify_and_add (x, ilist);
6484 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6486 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6487 tree vexpr = nv;
6488 if (new_vard != new_var)
6489 vexpr = build_fold_addr_expr (nv);
6490 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6491 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6492 lower_omp (&tseq, ctx);
6493 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6494 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6495 gimple_seq_add_seq (ilist, tseq);
6497 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6498 if (is_simd && ctx->scan_exclusive)
6500 tree nv2
6501 = create_tmp_var_raw (TREE_TYPE (new_var));
6502 gimple_add_tmp_var (nv2);
6503 ctx->cb.decl_map->put (nv, nv2);
6504 x = lang_hooks.decls.omp_clause_default_ctor
6505 (c, nv2, build_outer_var_ref (var, ctx));
6506 gimplify_and_add (x, ilist);
6507 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6508 if (x)
6509 gimplify_and_add (x, dlist);
6511 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6512 if (x)
6513 gimplify_and_add (x, dlist);
6515 else if (is_simd
6516 && ctx->scan_exclusive
6517 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6519 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6520 gimple_add_tmp_var (nv2);
6521 ctx->cb.decl_map->put (new_vard, nv2);
6522 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6523 if (x)
6524 gimplify_and_add (x, dlist);
6526 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6527 goto do_dtor;
6530 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6532 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6533 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6534 && is_omp_target (ctx->stmt))
6536 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6537 tree oldv = NULL_TREE;
6538 gcc_assert (d);
6539 if (DECL_HAS_VALUE_EXPR_P (d))
6540 oldv = DECL_VALUE_EXPR (d);
6541 SET_DECL_VALUE_EXPR (d, new_vard);
6542 DECL_HAS_VALUE_EXPR_P (d) = 1;
6543 lower_omp (&tseq, ctx);
6544 if (oldv)
6545 SET_DECL_VALUE_EXPR (d, oldv);
6546 else
6548 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6549 DECL_HAS_VALUE_EXPR_P (d) = 0;
6552 else
6553 lower_omp (&tseq, ctx);
6554 gimple_seq_add_seq (ilist, tseq);
6556 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6557 if (is_simd)
6559 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6560 lower_omp (&tseq, ctx);
6561 gimple_seq_add_seq (dlist, tseq);
6562 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6564 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6565 if (cond)
6567 if (lab2)
6568 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6569 break;
6571 goto do_dtor;
6573 else
6575 x = omp_reduction_init (c, TREE_TYPE (new_var));
6576 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6577 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6579 if (cond)
6581 gimple *g;
6582 tree lab2 = NULL_TREE;
6583 /* GOMP_taskgroup_reduction_register memsets the whole
6584 array to zero. If the initializer is zero, we don't
6585 need to initialize it again, just mark it as ever
6586 used unconditionally, i.e. cond = true. */
6587 if (initializer_zerop (x))
6589 g = gimple_build_assign (build_simple_mem_ref (cond),
6590 boolean_true_node);
6591 gimple_seq_add_stmt (ilist, g);
6592 break;
6595 /* Otherwise, emit
6596 if (!cond) { cond = true; new_var = x; } */
6597 if (!is_parallel_ctx (ctx))
6599 tree condv = create_tmp_var (boolean_type_node);
6600 tree m = build_simple_mem_ref (cond);
6601 g = gimple_build_assign (condv, m);
6602 gimple_seq_add_stmt (ilist, g);
6603 tree lab1
6604 = create_artificial_label (UNKNOWN_LOCATION);
6605 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6606 g = gimple_build_cond (NE_EXPR, condv,
6607 boolean_false_node,
6608 lab2, lab1);
6609 gimple_seq_add_stmt (ilist, g);
6610 gimple_seq_add_stmt (ilist,
6611 gimple_build_label (lab1));
6613 g = gimple_build_assign (build_simple_mem_ref (cond),
6614 boolean_true_node);
6615 gimple_seq_add_stmt (ilist, g);
6616 gimplify_assign (new_var, x, ilist);
6617 if (lab2)
6618 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6619 break;
6622 /* reduction(-:var) sums up the partial results, so it
6623 acts identically to reduction(+:var). */
6624 if (code == MINUS_EXPR)
6625 code = PLUS_EXPR;
6627 bool is_truth_op
6628 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6629 tree new_vard = new_var;
6630 if (is_simd && omp_privatize_by_reference (var))
6632 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6633 new_vard = TREE_OPERAND (new_var, 0);
6634 gcc_assert (DECL_P (new_vard));
6636 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6637 if (is_simd
6638 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6639 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6640 rvarp = &rvar;
6641 if (is_simd
6642 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6643 ivar, lvar, rvarp,
6644 &rvar2))
6646 if (new_vard != new_var)
6648 SET_DECL_VALUE_EXPR (new_vard,
6649 build_fold_addr_expr (lvar));
6650 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6653 tree ref = build_outer_var_ref (var, ctx);
6655 if (rvarp)
6657 if (ctx->for_simd_scan_phase)
6658 break;
6659 gimplify_assign (ivar, ref, &llist[0]);
6660 ref = build_outer_var_ref (var, ctx);
6661 gimplify_assign (ref, rvar, &llist[3]);
6662 break;
6665 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6667 if (sctx.is_simt)
6669 if (!simt_lane)
6670 simt_lane = create_tmp_var (unsigned_type_node);
6671 x = build_call_expr_internal_loc
6672 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6673 TREE_TYPE (ivar), 2, ivar, simt_lane);
6674 /* Make sure x is evaluated unconditionally. */
6675 tree bfly_var = create_tmp_var (TREE_TYPE (ivar));
6676 gimplify_assign (bfly_var, x, &llist[2]);
6677 x = build2 (code, TREE_TYPE (ivar), ivar, bfly_var);
6678 gimplify_assign (ivar, x, &llist[2]);
6680 tree ivar2 = ivar;
6681 tree ref2 = ref;
6682 if (is_truth_op)
6684 tree zero = build_zero_cst (TREE_TYPE (ivar));
6685 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6686 boolean_type_node, ivar,
6687 zero);
6688 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6689 boolean_type_node, ref,
6690 zero);
6692 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6693 if (is_truth_op)
6694 x = fold_convert (TREE_TYPE (ref), x);
6695 ref = build_outer_var_ref (var, ctx);
6696 gimplify_assign (ref, x, &llist[1]);
6699 else
6701 lower_private_allocate (var, new_var, allocator,
6702 allocate_ptr, ilist, ctx,
6703 false, NULL_TREE);
6704 if (omp_privatize_by_reference (var) && is_simd)
6705 handle_simd_reference (clause_loc, new_vard, ilist);
6706 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6707 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6708 break;
6709 gimplify_assign (new_var, x, ilist);
6710 if (is_simd)
6712 tree ref = build_outer_var_ref (var, ctx);
6713 tree new_var2 = new_var;
6714 tree ref2 = ref;
6715 if (is_truth_op)
6717 tree zero = build_zero_cst (TREE_TYPE (new_var));
6718 new_var2
6719 = fold_build2_loc (clause_loc, NE_EXPR,
6720 boolean_type_node, new_var,
6721 zero);
6722 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6723 boolean_type_node, ref,
6724 zero);
6726 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6727 if (is_truth_op)
6728 x = fold_convert (TREE_TYPE (new_var), x);
6729 ref = build_outer_var_ref (var, ctx);
6730 gimplify_assign (ref, x, dlist);
6732 if (allocator)
6733 goto do_dtor;
6736 break;
6738 default:
6739 gcc_unreachable ();
6743 if (tskred_avar)
6745 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6746 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6749 if (known_eq (sctx.max_vf, 1U))
6751 sctx.is_simt = false;
6752 if (ctx->lastprivate_conditional_map)
6754 if (gimple_omp_for_combined_into_p (ctx->stmt))
6756 /* Signal to lower_omp_1 that it should use parent context. */
6757 ctx->combined_into_simd_safelen1 = true;
6758 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6759 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6760 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6762 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6763 omp_context *outer = ctx->outer;
6764 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6765 outer = outer->outer;
6766 tree *v = ctx->lastprivate_conditional_map->get (o);
6767 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6768 tree *pv = outer->lastprivate_conditional_map->get (po);
6769 *v = *pv;
6772 else
6774 /* When not vectorized, treat lastprivate(conditional:) like
6775 normal lastprivate, as there will be just one simd lane
6776 writing the privatized variable. */
6777 delete ctx->lastprivate_conditional_map;
6778 ctx->lastprivate_conditional_map = NULL;
6783 if (nonconst_simd_if)
6785 if (sctx.lane == NULL_TREE)
6787 sctx.idx = create_tmp_var (unsigned_type_node);
6788 sctx.lane = create_tmp_var (unsigned_type_node);
6790 /* FIXME: For now. */
6791 sctx.is_simt = false;
6794 if (sctx.lane || sctx.is_simt)
6796 uid = create_tmp_var (ptr_type_node, "simduid");
6797 /* Don't want uninit warnings on simduid, it is always uninitialized,
6798 but we use it not for the value, but for the DECL_UID only. */
6799 suppress_warning (uid, OPT_Wuninitialized);
6800 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6801 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6802 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6803 gimple_omp_for_set_clauses (ctx->stmt, c);
6805 /* Emit calls denoting privatized variables and initializing a pointer to
6806 structure that holds private variables as fields after ompdevlow pass. */
6807 if (sctx.is_simt)
6809 sctx.simt_eargs[0] = uid;
6810 gimple *g
6811 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6812 gimple_call_set_lhs (g, uid);
6813 gimple_seq_add_stmt (ilist, g);
6814 sctx.simt_eargs.release ();
6816 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6817 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6818 gimple_call_set_lhs (g, simtrec);
6819 gimple_seq_add_stmt (ilist, g);
6821 if (sctx.lane)
6823 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6824 2 + (nonconst_simd_if != NULL),
6825 uid, integer_zero_node,
6826 nonconst_simd_if);
6827 gimple_call_set_lhs (g, sctx.lane);
6828 gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6829 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6830 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6831 build_int_cst (unsigned_type_node, 0));
6832 gimple_seq_add_stmt (ilist, g);
6833 if (sctx.lastlane)
6835 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6836 2, uid, sctx.lane);
6837 gimple_call_set_lhs (g, sctx.lastlane);
6838 gimple_seq_add_stmt (dlist, g);
6839 gimple_seq_add_seq (dlist, llist[3]);
6841 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6842 if (llist[2])
6844 tree simt_vf = create_tmp_var (unsigned_type_node);
6845 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6846 gimple_call_set_lhs (g, simt_vf);
6847 gimple_seq_add_stmt (dlist, g);
6849 tree t = build_int_cst (unsigned_type_node, 1);
6850 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6851 gimple_seq_add_stmt (dlist, g);
6853 t = build_int_cst (unsigned_type_node, 0);
6854 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6855 gimple_seq_add_stmt (dlist, g);
6857 tree body = create_artificial_label (UNKNOWN_LOCATION);
6858 tree header = create_artificial_label (UNKNOWN_LOCATION);
6859 tree end = create_artificial_label (UNKNOWN_LOCATION);
6860 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6861 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6863 gimple_seq_add_seq (dlist, llist[2]);
6865 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6866 gimple_seq_add_stmt (dlist, g);
6868 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6869 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6870 gimple_seq_add_stmt (dlist, g);
6872 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6874 for (int i = 0; i < 2; i++)
6875 if (llist[i])
6877 tree vf = create_tmp_var (unsigned_type_node);
6878 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6879 gimple_call_set_lhs (g, vf);
6880 gimple_seq *seq = i == 0 ? ilist : dlist;
6881 gimple_seq_add_stmt (seq, g);
6882 tree t = build_int_cst (unsigned_type_node, 0);
6883 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6884 gimple_seq_add_stmt (seq, g);
6885 tree body = create_artificial_label (UNKNOWN_LOCATION);
6886 tree header = create_artificial_label (UNKNOWN_LOCATION);
6887 tree end = create_artificial_label (UNKNOWN_LOCATION);
6888 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6889 gimple_seq_add_stmt (seq, gimple_build_label (body));
6890 gimple_seq_add_seq (seq, llist[i]);
6891 t = build_int_cst (unsigned_type_node, 1);
6892 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6893 gimple_seq_add_stmt (seq, g);
6894 gimple_seq_add_stmt (seq, gimple_build_label (header));
6895 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6896 gimple_seq_add_stmt (seq, g);
6897 gimple_seq_add_stmt (seq, gimple_build_label (end));
6900 if (sctx.is_simt)
6902 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6903 gimple *g
6904 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6905 gimple_seq_add_stmt (dlist, g);
6908 /* The copyin sequence is not to be executed by the main thread, since
6909 that would result in self-copies. Perhaps not visible to scalars,
6910 but it certainly is to C++ operator=. */
6911 if (copyin_seq)
6913 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6915 x = build2 (NE_EXPR, boolean_type_node, x,
6916 build_int_cst (TREE_TYPE (x), 0));
6917 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6918 gimplify_and_add (x, ilist);
6921 /* If any copyin variable is passed by reference, we must ensure the
6922 master thread doesn't modify it before it is copied over in all
6923 threads. Similarly for variables in both firstprivate and
6924 lastprivate clauses we need to ensure the lastprivate copying
6925 happens after firstprivate copying in all threads. And similarly
6926 for UDRs if initializer expression refers to omp_orig. */
6927 if (copyin_by_ref || lastprivate_firstprivate
6928 || (reduction_omp_orig_ref
6929 && !ctx->scan_inclusive
6930 && !ctx->scan_exclusive))
6932 /* Don't add any barrier for #pragma omp simd or
6933 #pragma omp distribute. */
6934 if (!is_task_ctx (ctx)
6935 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6936 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6937 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6940 /* If max_vf is non-zero, then we can use only a vectorization factor
6941 up to the max_vf we chose. So stick it into the safelen clause. */
6942 if (maybe_ne (sctx.max_vf, 0U))
6944 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6945 OMP_CLAUSE_SAFELEN);
6946 poly_uint64 safe_len;
6947 if (c == NULL_TREE
6948 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6949 && maybe_gt (safe_len, sctx.max_vf)))
6951 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6952 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6953 sctx.max_vf);
6954 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6955 gimple_omp_for_set_clauses (ctx->stmt, c);
6960 /* Create temporary variables for lastprivate(conditional:) implementation
6961 in context CTX with CLAUSES. */
6963 static void
6964 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6966 tree iter_type = NULL_TREE;
6967 tree cond_ptr = NULL_TREE;
6968 tree iter_var = NULL_TREE;
6969 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6970 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6971 tree next = *clauses;
6972 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6973 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6974 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6976 if (is_simd)
6978 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6979 gcc_assert (cc);
6980 if (iter_type == NULL_TREE)
6982 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6983 iter_var = create_tmp_var_raw (iter_type);
6984 DECL_CONTEXT (iter_var) = current_function_decl;
6985 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6986 DECL_CHAIN (iter_var) = ctx->block_vars;
6987 ctx->block_vars = iter_var;
6988 tree c3
6989 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6990 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6991 OMP_CLAUSE_DECL (c3) = iter_var;
6992 OMP_CLAUSE_CHAIN (c3) = *clauses;
6993 *clauses = c3;
6994 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6996 next = OMP_CLAUSE_CHAIN (cc);
6997 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6998 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6999 ctx->lastprivate_conditional_map->put (o, v);
7000 continue;
7002 if (iter_type == NULL)
7004 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
7006 struct omp_for_data fd;
7007 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
7008 NULL);
7009 iter_type = unsigned_type_for (fd.iter_type);
7011 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
7012 iter_type = unsigned_type_node;
7013 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
7014 if (c2)
7016 cond_ptr
7017 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
7018 OMP_CLAUSE_DECL (c2) = cond_ptr;
7020 else
7022 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
7023 DECL_CONTEXT (cond_ptr) = current_function_decl;
7024 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
7025 DECL_CHAIN (cond_ptr) = ctx->block_vars;
7026 ctx->block_vars = cond_ptr;
7027 c2 = build_omp_clause (UNKNOWN_LOCATION,
7028 OMP_CLAUSE__CONDTEMP_);
7029 OMP_CLAUSE_DECL (c2) = cond_ptr;
7030 OMP_CLAUSE_CHAIN (c2) = *clauses;
7031 *clauses = c2;
7033 iter_var = create_tmp_var_raw (iter_type);
7034 DECL_CONTEXT (iter_var) = current_function_decl;
7035 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7036 DECL_CHAIN (iter_var) = ctx->block_vars;
7037 ctx->block_vars = iter_var;
7038 tree c3
7039 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7040 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7041 OMP_CLAUSE_DECL (c3) = iter_var;
7042 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
7043 OMP_CLAUSE_CHAIN (c2) = c3;
7044 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7046 tree v = create_tmp_var_raw (iter_type);
7047 DECL_CONTEXT (v) = current_function_decl;
7048 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
7049 DECL_CHAIN (v) = ctx->block_vars;
7050 ctx->block_vars = v;
7051 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7052 ctx->lastprivate_conditional_map->put (o, v);
7057 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7058 both parallel and workshare constructs. PREDICATE may be NULL if it's
7059 always true. BODY_P is the sequence to insert early initialization
7060 if needed, STMT_LIST is where the non-conditional lastprivate handling
7061 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
7062 section. */
7064 static void
7065 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
7066 gimple_seq *stmt_list, gimple_seq *cstmt_list,
7067 omp_context *ctx)
7069 tree x, c, label = NULL, orig_clauses = clauses;
7070 bool par_clauses = false;
7071 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
7072 unsigned HOST_WIDE_INT conditional_off = 0;
7073 gimple_seq post_stmt_list = NULL;
7075 /* Early exit if there are no lastprivate or linear clauses. */
7076 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
7077 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
7078 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
7079 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
7080 break;
7081 if (clauses == NULL)
7083 /* If this was a workshare clause, see if it had been combined
7084 with its parallel. In that case, look for the clauses on the
7085 parallel statement itself. */
7086 if (is_parallel_ctx (ctx))
7087 return;
7089 ctx = ctx->outer;
7090 if (ctx == NULL || !is_parallel_ctx (ctx))
7091 return;
7093 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7094 OMP_CLAUSE_LASTPRIVATE);
7095 if (clauses == NULL)
7096 return;
7097 par_clauses = true;
7100 bool maybe_simt = false;
7101 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7102 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7104 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
7105 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
7106 if (simduid)
7107 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
7110 if (predicate)
7112 gcond *stmt;
7113 tree label_true, arm1, arm2;
7114 enum tree_code pred_code = TREE_CODE (predicate);
7116 label = create_artificial_label (UNKNOWN_LOCATION);
7117 label_true = create_artificial_label (UNKNOWN_LOCATION);
7118 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
7120 arm1 = TREE_OPERAND (predicate, 0);
7121 arm2 = TREE_OPERAND (predicate, 1);
7122 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7123 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
7125 else
7127 arm1 = predicate;
7128 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7129 arm2 = boolean_false_node;
7130 pred_code = NE_EXPR;
7132 if (maybe_simt)
7134 c = build2 (pred_code, boolean_type_node, arm1, arm2);
7135 c = fold_convert (integer_type_node, c);
7136 simtcond = create_tmp_var (integer_type_node);
7137 gimplify_assign (simtcond, c, stmt_list);
7138 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
7139 1, simtcond);
7140 c = create_tmp_var (integer_type_node);
7141 gimple_call_set_lhs (g, c);
7142 gimple_seq_add_stmt (stmt_list, g);
7143 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
7144 label_true, label);
7146 else
7147 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
7148 gimple_seq_add_stmt (stmt_list, stmt);
7149 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
7152 tree cond_ptr = NULL_TREE;
7153 for (c = clauses; c ;)
7155 tree var, new_var;
7156 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7157 gimple_seq *this_stmt_list = stmt_list;
7158 tree lab2 = NULL_TREE;
7160 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7161 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7162 && ctx->lastprivate_conditional_map
7163 && !ctx->combined_into_simd_safelen1)
7165 gcc_assert (body_p);
7166 if (simduid)
7167 goto next;
7168 if (cond_ptr == NULL_TREE)
7170 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
7171 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
7173 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
7174 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7175 tree v = *ctx->lastprivate_conditional_map->get (o);
7176 gimplify_assign (v, build_zero_cst (type), body_p);
7177 this_stmt_list = cstmt_list;
7178 tree mem;
7179 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
7181 mem = build2 (MEM_REF, type, cond_ptr,
7182 build_int_cst (TREE_TYPE (cond_ptr),
7183 conditional_off));
7184 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
7186 else
7187 mem = build4 (ARRAY_REF, type, cond_ptr,
7188 size_int (conditional_off++), NULL_TREE, NULL_TREE);
7189 tree mem2 = copy_node (mem);
7190 gimple_seq seq = NULL;
7191 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
7192 gimple_seq_add_seq (this_stmt_list, seq);
7193 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7194 lab2 = create_artificial_label (UNKNOWN_LOCATION);
7195 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
7196 gimple_seq_add_stmt (this_stmt_list, g);
7197 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
7198 gimplify_assign (mem2, v, this_stmt_list);
7200 else if (predicate
7201 && ctx->combined_into_simd_safelen1
7202 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7203 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7204 && ctx->lastprivate_conditional_map)
7205 this_stmt_list = &post_stmt_list;
7207 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7208 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7209 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7211 var = OMP_CLAUSE_DECL (c);
7212 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7213 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7214 && is_taskloop_ctx (ctx))
7216 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
7217 new_var = lookup_decl (var, ctx->outer);
7219 else
7221 new_var = lookup_decl (var, ctx);
7222 /* Avoid uninitialized warnings for lastprivate and
7223 for linear iterators. */
7224 if (predicate
7225 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7226 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
7227 suppress_warning (new_var, OPT_Wuninitialized);
7230 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
7232 tree val = DECL_VALUE_EXPR (new_var);
7233 if (TREE_CODE (val) == ARRAY_REF
7234 && VAR_P (TREE_OPERAND (val, 0))
7235 && lookup_attribute ("omp simd array",
7236 DECL_ATTRIBUTES (TREE_OPERAND (val,
7237 0))))
7239 if (lastlane == NULL)
7241 lastlane = create_tmp_var (unsigned_type_node);
7242 gcall *g
7243 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
7244 2, simduid,
7245 TREE_OPERAND (val, 1));
7246 gimple_call_set_lhs (g, lastlane);
7247 gimple_seq_add_stmt (this_stmt_list, g);
7249 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
7250 TREE_OPERAND (val, 0), lastlane,
7251 NULL_TREE, NULL_TREE);
7252 TREE_THIS_NOTRAP (new_var) = 1;
7255 else if (maybe_simt)
7257 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7258 ? DECL_VALUE_EXPR (new_var)
7259 : new_var);
7260 if (simtlast == NULL)
7262 simtlast = create_tmp_var (unsigned_type_node);
7263 gcall *g = gimple_build_call_internal
7264 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7265 gimple_call_set_lhs (g, simtlast);
7266 gimple_seq_add_stmt (this_stmt_list, g);
7268 x = build_call_expr_internal_loc
7269 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7270 TREE_TYPE (val), 2, val, simtlast);
7271 new_var = unshare_expr (new_var);
7272 gimplify_assign (new_var, x, this_stmt_list);
7273 new_var = unshare_expr (new_var);
7276 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7277 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7279 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7280 gimple_seq_add_seq (this_stmt_list,
7281 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7282 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7284 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7285 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7287 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7288 gimple_seq_add_seq (this_stmt_list,
7289 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7290 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
7293 x = NULL_TREE;
7294 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7295 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7296 && is_taskloop_ctx (ctx))
7298 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7299 ctx->outer->outer);
7300 if (is_global_var (ovar))
7301 x = ovar;
7303 if (!x)
7304 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7305 if (omp_privatize_by_reference (var))
7306 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7307 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7308 gimplify_and_add (x, this_stmt_list);
7310 if (lab2)
7311 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7314 next:
7315 c = OMP_CLAUSE_CHAIN (c);
7316 if (c == NULL && !par_clauses)
7318 /* If this was a workshare clause, see if it had been combined
7319 with its parallel. In that case, continue looking for the
7320 clauses also on the parallel statement itself. */
7321 if (is_parallel_ctx (ctx))
7322 break;
7324 ctx = ctx->outer;
7325 if (ctx == NULL || !is_parallel_ctx (ctx))
7326 break;
7328 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7329 OMP_CLAUSE_LASTPRIVATE);
7330 par_clauses = true;
7334 if (label)
7335 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7336 gimple_seq_add_seq (stmt_list, post_stmt_list);
7339 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7340 (which might be a placeholder). INNER is true if this is an inner
7341 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7342 join markers. Generate the before-loop forking sequence in
7343 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7344 general form of these sequences is
7346 GOACC_REDUCTION_SETUP
7347 GOACC_FORK
7348 GOACC_REDUCTION_INIT
7350 GOACC_REDUCTION_FINI
7351 GOACC_JOIN
7352 GOACC_REDUCTION_TEARDOWN. */
7354 static void
7355 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7356 gcall *fork, gcall *private_marker, gcall *join,
7357 gimple_seq *fork_seq, gimple_seq *join_seq,
7358 omp_context *ctx)
7360 gimple_seq before_fork = NULL;
7361 gimple_seq after_fork = NULL;
7362 gimple_seq before_join = NULL;
7363 gimple_seq after_join = NULL;
7364 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7365 setup_code = NULL_TREE, teardown_code = NULL_TREE;
7366 unsigned offset = 0;
7368 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7369 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7371 /* No 'reduction' clauses on OpenACC 'kernels'. */
7372 gcc_checking_assert (!is_oacc_kernels (ctx));
7373 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7374 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7376 tree orig = OMP_CLAUSE_DECL (c);
7377 tree var = maybe_lookup_decl (orig, ctx);
7378 tree ref_to_res = NULL_TREE;
7379 tree incoming, outgoing, v1, v2, v3;
7380 bool is_private = false;
7382 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7383 if (rcode == MINUS_EXPR)
7384 rcode = PLUS_EXPR;
7385 else if (rcode == TRUTH_ANDIF_EXPR)
7386 rcode = BIT_AND_EXPR;
7387 else if (rcode == TRUTH_ORIF_EXPR)
7388 rcode = BIT_IOR_EXPR;
7389 tree op = build_int_cst (unsigned_type_node, rcode);
7391 if (!var)
7392 var = orig;
7394 incoming = outgoing = var;
7396 if (!inner)
7398 /* See if an outer construct also reduces this variable. */
7399 omp_context *outer = ctx;
7401 while (omp_context *probe = outer->outer)
7403 enum gimple_code type = gimple_code (probe->stmt);
7404 tree cls;
7406 switch (type)
7408 case GIMPLE_OMP_FOR:
7409 cls = gimple_omp_for_clauses (probe->stmt);
7410 break;
7412 case GIMPLE_OMP_TARGET:
7413 /* No 'reduction' clauses inside OpenACC 'kernels'
7414 regions. */
7415 gcc_checking_assert (!is_oacc_kernels (probe));
7417 if (!is_gimple_omp_offloaded (probe->stmt))
7418 goto do_lookup;
7420 cls = gimple_omp_target_clauses (probe->stmt);
7421 break;
7423 default:
7424 goto do_lookup;
7427 outer = probe;
7428 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7429 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7430 && orig == OMP_CLAUSE_DECL (cls))
7432 incoming = outgoing = lookup_decl (orig, probe);
7433 goto has_outer_reduction;
7435 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7436 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7437 && orig == OMP_CLAUSE_DECL (cls))
7439 is_private = true;
7440 goto do_lookup;
7444 do_lookup:
7445 /* This is the outermost construct with this reduction,
7446 see if there's a mapping for it. */
7447 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7448 && maybe_lookup_field (orig, outer) && !is_private)
7450 ref_to_res = build_receiver_ref (orig, false, outer);
7451 if (omp_privatize_by_reference (orig))
7452 ref_to_res = build_simple_mem_ref (ref_to_res);
7454 tree type = TREE_TYPE (var);
7455 if (POINTER_TYPE_P (type))
7456 type = TREE_TYPE (type);
7458 outgoing = var;
7459 incoming = omp_reduction_init_op (loc, rcode, type);
7461 else
7463 /* Try to look at enclosing contexts for reduction var,
7464 use original if no mapping found. */
7465 tree t = NULL_TREE;
7466 omp_context *c = ctx->outer;
7467 while (c && !t)
7469 t = maybe_lookup_decl (orig, c);
7470 c = c->outer;
7472 incoming = outgoing = (t ? t : orig);
7475 has_outer_reduction:;
7478 if (!ref_to_res)
7479 ref_to_res = integer_zero_node;
7481 if (omp_privatize_by_reference (orig))
7483 tree type = TREE_TYPE (var);
7484 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7486 if (!inner)
7488 tree x = create_tmp_var (TREE_TYPE (type), id);
7489 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7492 v1 = create_tmp_var (type, id);
7493 v2 = create_tmp_var (type, id);
7494 v3 = create_tmp_var (type, id);
7496 gimplify_assign (v1, var, fork_seq);
7497 gimplify_assign (v2, var, fork_seq);
7498 gimplify_assign (v3, var, fork_seq);
7500 var = build_simple_mem_ref (var);
7501 v1 = build_simple_mem_ref (v1);
7502 v2 = build_simple_mem_ref (v2);
7503 v3 = build_simple_mem_ref (v3);
7504 outgoing = build_simple_mem_ref (outgoing);
7506 if (!TREE_CONSTANT (incoming))
7507 incoming = build_simple_mem_ref (incoming);
7509 else
7510 /* Note that 'var' might be a mem ref. */
7511 v1 = v2 = v3 = var;
7513 /* Determine position in reduction buffer, which may be used
7514 by target. The parser has ensured that this is not a
7515 variable-sized type. */
7516 fixed_size_mode mode
7517 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7518 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7519 offset = (offset + align - 1) & ~(align - 1);
7520 tree off = build_int_cst (sizetype, offset);
7521 offset += GET_MODE_SIZE (mode);
7523 if (!init_code)
7525 init_code = build_int_cst (integer_type_node,
7526 IFN_GOACC_REDUCTION_INIT);
7527 fini_code = build_int_cst (integer_type_node,
7528 IFN_GOACC_REDUCTION_FINI);
7529 setup_code = build_int_cst (integer_type_node,
7530 IFN_GOACC_REDUCTION_SETUP);
7531 teardown_code = build_int_cst (integer_type_node,
7532 IFN_GOACC_REDUCTION_TEARDOWN);
7535 tree setup_call
7536 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7537 TREE_TYPE (var), 6, setup_code,
7538 unshare_expr (ref_to_res),
7539 unshare_expr (incoming),
7540 level, op, off);
7541 tree init_call
7542 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7543 TREE_TYPE (var), 6, init_code,
7544 unshare_expr (ref_to_res),
7545 unshare_expr (v1), level, op, off);
7546 tree fini_call
7547 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7548 TREE_TYPE (var), 6, fini_code,
7549 unshare_expr (ref_to_res),
7550 unshare_expr (v2), level, op, off);
7551 tree teardown_call
7552 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7553 TREE_TYPE (var), 6, teardown_code,
7554 ref_to_res, unshare_expr (v3),
7555 level, op, off);
7557 gimplify_assign (unshare_expr (v1), setup_call, &before_fork);
7558 gimplify_assign (unshare_expr (v2), init_call, &after_fork);
7559 gimplify_assign (unshare_expr (v3), fini_call, &before_join);
7560 gimplify_assign (unshare_expr (outgoing), teardown_call, &after_join);
7563 /* Now stitch things together. */
7564 gimple_seq_add_seq (fork_seq, before_fork);
7565 if (private_marker)
7566 gimple_seq_add_stmt (fork_seq, private_marker);
7567 if (fork)
7568 gimple_seq_add_stmt (fork_seq, fork);
7569 gimple_seq_add_seq (fork_seq, after_fork);
7571 gimple_seq_add_seq (join_seq, before_join);
7572 if (join)
7573 gimple_seq_add_stmt (join_seq, join);
7574 gimple_seq_add_seq (join_seq, after_join);
7577 /* Generate code to implement the REDUCTION clauses, append it
7578 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7579 that should be emitted also inside of the critical section,
7580 in that case clear *CLIST afterwards, otherwise leave it as is
7581 and let the caller emit it itself. */
7583 static void
7584 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7585 gimple_seq *clist, omp_context *ctx)
7587 gimple_seq sub_seq = NULL;
7588 gimple *stmt;
7589 tree x, c;
7590 int count = 0;
7592 /* OpenACC loop reductions are handled elsewhere. */
7593 if (is_gimple_omp_oacc (ctx->stmt))
7594 return;
7596 /* SIMD reductions are handled in lower_rec_input_clauses. */
7597 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7598 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7599 return;
7601 /* inscan reductions are handled elsewhere. */
7602 if (ctx->scan_inclusive || ctx->scan_exclusive)
7603 return;
7605 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7606 update in that case, otherwise use a lock. */
7607 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7608 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7609 && !OMP_CLAUSE_REDUCTION_TASK (c))
7611 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7612 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7614 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7615 count = -1;
7616 break;
7618 count++;
7621 if (count == 0)
7622 return;
7624 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7626 tree var, ref, new_var, orig_var;
7627 enum tree_code code;
7628 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7630 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7631 || OMP_CLAUSE_REDUCTION_TASK (c))
7632 continue;
7634 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7635 orig_var = var = OMP_CLAUSE_DECL (c);
7636 if (TREE_CODE (var) == MEM_REF)
7638 var = TREE_OPERAND (var, 0);
7639 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7640 var = TREE_OPERAND (var, 0);
7641 if (TREE_CODE (var) == ADDR_EXPR)
7642 var = TREE_OPERAND (var, 0);
7643 else
7645 /* If this is a pointer or referenced based array
7646 section, the var could be private in the outer
7647 context e.g. on orphaned loop construct. Pretend this
7648 is private variable's outer reference. */
7649 ccode = OMP_CLAUSE_PRIVATE;
7650 if (INDIRECT_REF_P (var))
7651 var = TREE_OPERAND (var, 0);
7653 orig_var = var;
7654 if (is_variable_sized (var))
7656 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7657 var = DECL_VALUE_EXPR (var);
7658 gcc_assert (INDIRECT_REF_P (var));
7659 var = TREE_OPERAND (var, 0);
7660 gcc_assert (DECL_P (var));
7663 new_var = lookup_decl (var, ctx);
7664 if (var == OMP_CLAUSE_DECL (c)
7665 && omp_privatize_by_reference (var))
7666 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7667 ref = build_outer_var_ref (var, ctx, ccode);
7668 code = OMP_CLAUSE_REDUCTION_CODE (c);
7670 /* reduction(-:var) sums up the partial results, so it acts
7671 identically to reduction(+:var). */
7672 if (code == MINUS_EXPR)
7673 code = PLUS_EXPR;
7675 bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
7676 if (count == 1)
7678 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7680 addr = save_expr (addr);
7681 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7682 tree new_var2 = new_var;
7683 tree ref2 = ref;
7684 if (is_truth_op)
7686 tree zero = build_zero_cst (TREE_TYPE (new_var));
7687 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7688 boolean_type_node, new_var, zero);
7689 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7690 ref, zero);
7692 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7693 new_var2);
7694 if (is_truth_op)
7695 x = fold_convert (TREE_TYPE (new_var), x);
7696 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7697 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7698 gimplify_and_add (x, stmt_seqp);
7699 return;
7701 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7703 tree d = OMP_CLAUSE_DECL (c);
7704 tree type = TREE_TYPE (d);
7705 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7706 tree i = create_tmp_var (TREE_TYPE (v));
7707 tree ptype = build_pointer_type (TREE_TYPE (type));
7708 tree bias = TREE_OPERAND (d, 1);
7709 d = TREE_OPERAND (d, 0);
7710 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7712 tree b = TREE_OPERAND (d, 1);
7713 b = maybe_lookup_decl (b, ctx);
7714 if (b == NULL)
7716 b = TREE_OPERAND (d, 1);
7717 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7719 if (integer_zerop (bias))
7720 bias = b;
7721 else
7723 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7724 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7725 TREE_TYPE (b), b, bias);
7727 d = TREE_OPERAND (d, 0);
7729 /* For ref build_outer_var_ref already performs this, so
7730 only new_var needs a dereference. */
7731 if (INDIRECT_REF_P (d))
7733 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7734 gcc_assert (omp_privatize_by_reference (var)
7735 && var == orig_var);
7737 else if (TREE_CODE (d) == ADDR_EXPR)
7739 if (orig_var == var)
7741 new_var = build_fold_addr_expr (new_var);
7742 ref = build_fold_addr_expr (ref);
7745 else
7747 gcc_assert (orig_var == var);
7748 if (omp_privatize_by_reference (var))
7749 ref = build_fold_addr_expr (ref);
7751 if (DECL_P (v))
7753 tree t = maybe_lookup_decl (v, ctx);
7754 if (t)
7755 v = t;
7756 else
7757 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7758 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7760 if (!integer_zerop (bias))
7762 bias = fold_convert_loc (clause_loc, sizetype, bias);
7763 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7764 TREE_TYPE (new_var), new_var,
7765 unshare_expr (bias));
7766 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7767 TREE_TYPE (ref), ref, bias);
7769 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7770 ref = fold_convert_loc (clause_loc, ptype, ref);
7771 tree m = create_tmp_var (ptype);
7772 gimplify_assign (m, new_var, stmt_seqp);
7773 new_var = m;
7774 m = create_tmp_var (ptype);
7775 gimplify_assign (m, ref, stmt_seqp);
7776 ref = m;
7777 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7778 tree body = create_artificial_label (UNKNOWN_LOCATION);
7779 tree end = create_artificial_label (UNKNOWN_LOCATION);
7780 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7781 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7782 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7783 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7785 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7786 tree decl_placeholder
7787 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7788 SET_DECL_VALUE_EXPR (placeholder, out);
7789 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7790 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7791 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7792 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7793 gimple_seq_add_seq (&sub_seq,
7794 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7795 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7796 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7797 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7799 else
7801 tree out2 = out;
7802 tree priv2 = priv;
7803 if (is_truth_op)
7805 tree zero = build_zero_cst (TREE_TYPE (out));
7806 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7807 boolean_type_node, out, zero);
7808 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7809 boolean_type_node, priv, zero);
7811 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7812 if (is_truth_op)
7813 x = fold_convert (TREE_TYPE (out), x);
7814 out = unshare_expr (out);
7815 gimplify_assign (out, x, &sub_seq);
7817 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7818 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7819 gimple_seq_add_stmt (&sub_seq, g);
7820 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7821 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7822 gimple_seq_add_stmt (&sub_seq, g);
7823 g = gimple_build_assign (i, PLUS_EXPR, i,
7824 build_int_cst (TREE_TYPE (i), 1));
7825 gimple_seq_add_stmt (&sub_seq, g);
7826 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7827 gimple_seq_add_stmt (&sub_seq, g);
7828 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7830 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7832 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7834 if (omp_privatize_by_reference (var)
7835 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7836 TREE_TYPE (ref)))
7837 ref = build_fold_addr_expr_loc (clause_loc, ref);
7838 SET_DECL_VALUE_EXPR (placeholder, ref);
7839 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7840 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7841 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7842 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7843 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7845 else
7847 tree new_var2 = new_var;
7848 tree ref2 = ref;
7849 if (is_truth_op)
7851 tree zero = build_zero_cst (TREE_TYPE (new_var));
7852 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7853 boolean_type_node, new_var, zero);
7854 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7855 ref, zero);
7857 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7858 if (is_truth_op)
7859 x = fold_convert (TREE_TYPE (new_var), x);
7860 ref = build_outer_var_ref (var, ctx);
7861 gimplify_assign (ref, x, &sub_seq);
7865 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7867 gimple_seq_add_stmt (stmt_seqp, stmt);
7869 gimple_seq_add_seq (stmt_seqp, sub_seq);
7871 if (clist)
7873 gimple_seq_add_seq (stmt_seqp, *clist);
7874 *clist = NULL;
7877 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7879 gimple_seq_add_stmt (stmt_seqp, stmt);
7883 /* Generate code to implement the COPYPRIVATE clauses. */
7885 static void
7886 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7887 omp_context *ctx)
7889 tree c;
7891 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7893 tree var, new_var, ref, x;
7894 bool by_ref;
7895 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7897 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7898 continue;
7900 var = OMP_CLAUSE_DECL (c);
7901 by_ref = use_pointer_for_field (var, NULL);
7903 ref = build_sender_ref (var, ctx);
7904 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7905 if (by_ref)
7907 x = build_fold_addr_expr_loc (clause_loc, new_var);
7908 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7910 gimplify_assign (ref, x, slist);
7912 ref = build_receiver_ref (var, false, ctx);
7913 if (by_ref)
7915 ref = fold_convert_loc (clause_loc,
7916 build_pointer_type (TREE_TYPE (new_var)),
7917 ref);
7918 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7920 if (omp_privatize_by_reference (var))
7922 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7923 ref = build_simple_mem_ref_loc (clause_loc, ref);
7924 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7926 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7927 gimplify_and_add (x, rlist);
7932 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7933 and REDUCTION from the sender (aka parent) side. */
7935 static void
7936 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7937 omp_context *ctx)
7939 tree c, t;
7940 int ignored_looptemp = 0;
7941 bool is_taskloop = false;
7943 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7944 by GOMP_taskloop. */
7945 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7947 ignored_looptemp = 2;
7948 is_taskloop = true;
7951 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7953 tree val, ref, x, var;
7954 bool by_ref, do_in = false, do_out = false;
7955 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7957 switch (OMP_CLAUSE_CODE (c))
7959 case OMP_CLAUSE_PRIVATE:
7960 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7961 break;
7962 continue;
7963 case OMP_CLAUSE_FIRSTPRIVATE:
7964 case OMP_CLAUSE_COPYIN:
7965 case OMP_CLAUSE_LASTPRIVATE:
7966 case OMP_CLAUSE_IN_REDUCTION:
7967 case OMP_CLAUSE__REDUCTEMP_:
7968 break;
7969 case OMP_CLAUSE_REDUCTION:
7970 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7971 continue;
7972 break;
7973 case OMP_CLAUSE_SHARED:
7974 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7975 break;
7976 continue;
7977 case OMP_CLAUSE__LOOPTEMP_:
7978 if (ignored_looptemp)
7980 ignored_looptemp--;
7981 continue;
7983 break;
7984 default:
7985 continue;
7988 val = OMP_CLAUSE_DECL (c);
7989 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7990 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7991 && TREE_CODE (val) == MEM_REF)
7993 val = TREE_OPERAND (val, 0);
7994 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7995 val = TREE_OPERAND (val, 0);
7996 if (INDIRECT_REF_P (val)
7997 || TREE_CODE (val) == ADDR_EXPR)
7998 val = TREE_OPERAND (val, 0);
7999 if (is_variable_sized (val))
8000 continue;
8003 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
8004 outer taskloop region. */
8005 omp_context *ctx_for_o = ctx;
8006 if (is_taskloop
8007 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8008 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8009 ctx_for_o = ctx->outer;
8011 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
8013 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
8014 && is_global_var (var)
8015 && (val == OMP_CLAUSE_DECL (c)
8016 || !is_task_ctx (ctx)
8017 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
8018 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
8019 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
8020 != POINTER_TYPE)))))
8021 continue;
8023 t = omp_member_access_dummy_var (var);
8024 if (t)
8026 var = DECL_VALUE_EXPR (var);
8027 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
8028 if (o != t)
8029 var = unshare_and_remap (var, t, o);
8030 else
8031 var = unshare_expr (var);
8034 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
8036 /* Handle taskloop firstprivate/lastprivate, where the
8037 lastprivate on GIMPLE_OMP_TASK is represented as
8038 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8039 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
8040 x = omp_build_component_ref (ctx->sender_decl, f);
8041 if (use_pointer_for_field (val, ctx))
8042 var = build_fold_addr_expr (var);
8043 gimplify_assign (x, var, ilist);
8044 DECL_ABSTRACT_ORIGIN (f) = NULL;
8045 continue;
8048 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8049 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
8050 || val == OMP_CLAUSE_DECL (c))
8051 && is_variable_sized (val))
8052 continue;
8053 by_ref = use_pointer_for_field (val, NULL);
8055 switch (OMP_CLAUSE_CODE (c))
8057 case OMP_CLAUSE_FIRSTPRIVATE:
8058 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
8059 && !by_ref
8060 && is_task_ctx (ctx))
8061 suppress_warning (var);
8062 do_in = true;
8063 break;
8065 case OMP_CLAUSE_PRIVATE:
8066 case OMP_CLAUSE_COPYIN:
8067 case OMP_CLAUSE__LOOPTEMP_:
8068 case OMP_CLAUSE__REDUCTEMP_:
8069 do_in = true;
8070 break;
8072 case OMP_CLAUSE_LASTPRIVATE:
8073 if (by_ref || omp_privatize_by_reference (val))
8075 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8076 continue;
8077 do_in = true;
8079 else
8081 do_out = true;
8082 if (lang_hooks.decls.omp_private_outer_ref (val))
8083 do_in = true;
8085 break;
8087 case OMP_CLAUSE_REDUCTION:
8088 case OMP_CLAUSE_IN_REDUCTION:
8089 do_in = true;
8090 if (val == OMP_CLAUSE_DECL (c))
8092 if (is_task_ctx (ctx))
8093 by_ref = use_pointer_for_field (val, ctx);
8094 else
8095 do_out = !(by_ref || omp_privatize_by_reference (val));
8097 else
8098 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
8099 break;
8101 default:
8102 gcc_unreachable ();
8105 if (do_in)
8107 ref = build_sender_ref (val, ctx);
8108 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
8109 gimplify_assign (ref, x, ilist);
8110 if (is_task_ctx (ctx))
8111 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
8114 if (do_out)
8116 ref = build_sender_ref (val, ctx);
8117 gimplify_assign (var, ref, olist);
8122 /* Generate code to implement SHARED from the sender (aka parent)
8123 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8124 list things that got automatically shared. */
8126 static void
8127 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
8129 tree var, ovar, nvar, t, f, x, record_type;
8131 if (ctx->record_type == NULL)
8132 return;
8134 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
8135 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8137 ovar = DECL_ABSTRACT_ORIGIN (f);
8138 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
8139 continue;
8141 nvar = maybe_lookup_decl (ovar, ctx);
8142 if (!nvar
8143 || !DECL_HAS_VALUE_EXPR_P (nvar)
8144 || (ctx->allocate_map
8145 && ctx->allocate_map->get (ovar)))
8146 continue;
8148 /* If CTX is a nested parallel directive. Find the immediately
8149 enclosing parallel or workshare construct that contains a
8150 mapping for OVAR. */
8151 var = lookup_decl_in_outer_ctx (ovar, ctx);
8153 t = omp_member_access_dummy_var (var);
8154 if (t)
8156 var = DECL_VALUE_EXPR (var);
8157 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
8158 if (o != t)
8159 var = unshare_and_remap (var, t, o);
8160 else
8161 var = unshare_expr (var);
8164 if (use_pointer_for_field (ovar, ctx))
8166 x = build_sender_ref (ovar, ctx);
8167 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
8168 && TREE_TYPE (f) == TREE_TYPE (ovar))
8170 gcc_assert (is_parallel_ctx (ctx)
8171 && DECL_ARTIFICIAL (ovar));
8172 /* _condtemp_ clause. */
8173 var = build_constructor (TREE_TYPE (x), NULL);
8175 else
8176 var = build_fold_addr_expr (var);
8177 gimplify_assign (x, var, ilist);
8179 else
8181 x = build_sender_ref (ovar, ctx);
8182 gimplify_assign (x, var, ilist);
8184 if (!TREE_READONLY (var)
8185 /* We don't need to receive a new reference to a result
8186 or parm decl. In fact we may not store to it as we will
8187 invalidate any pending RSO and generate wrong gimple
8188 during inlining. */
8189 && !((TREE_CODE (var) == RESULT_DECL
8190 || TREE_CODE (var) == PARM_DECL)
8191 && DECL_BY_REFERENCE (var)))
8193 x = build_sender_ref (ovar, ctx);
8194 gimplify_assign (var, x, olist);
8200 /* Emit an OpenACC head marker call, encapulating the partitioning and
8201 other information that must be processed by the target compiler.
8202 Return the maximum number of dimensions the associated loop might
8203 be partitioned over. */
8205 static unsigned
8206 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
8207 gimple_seq *seq, omp_context *ctx)
8209 unsigned levels = 0;
8210 unsigned tag = 0;
8211 tree gang_static = NULL_TREE;
8212 auto_vec<tree, 5> args;
8214 args.quick_push (build_int_cst
8215 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
8216 args.quick_push (ddvar);
8217 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8219 switch (OMP_CLAUSE_CODE (c))
8221 case OMP_CLAUSE_GANG:
8222 tag |= OLF_DIM_GANG;
8223 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
8224 /* static:* is represented by -1, and we can ignore it, as
8225 scheduling is always static. */
8226 if (gang_static && integer_minus_onep (gang_static))
8227 gang_static = NULL_TREE;
8228 levels++;
8229 break;
8231 case OMP_CLAUSE_WORKER:
8232 tag |= OLF_DIM_WORKER;
8233 levels++;
8234 break;
8236 case OMP_CLAUSE_VECTOR:
8237 tag |= OLF_DIM_VECTOR;
8238 levels++;
8239 break;
8241 case OMP_CLAUSE_SEQ:
8242 tag |= OLF_SEQ;
8243 break;
8245 case OMP_CLAUSE_AUTO:
8246 tag |= OLF_AUTO;
8247 break;
8249 case OMP_CLAUSE_INDEPENDENT:
8250 tag |= OLF_INDEPENDENT;
8251 break;
8253 case OMP_CLAUSE_TILE:
8254 tag |= OLF_TILE;
8255 break;
8257 case OMP_CLAUSE_REDUCTION:
8258 tag |= OLF_REDUCTION;
8259 break;
8261 default:
8262 continue;
8266 if (gang_static)
8268 if (DECL_P (gang_static))
8269 gang_static = build_outer_var_ref (gang_static, ctx);
8270 tag |= OLF_GANG_STATIC;
8273 omp_context *tgt = enclosing_target_ctx (ctx);
8274 if (!tgt || is_oacc_parallel_or_serial (tgt))
8276 else if (is_oacc_kernels (tgt))
8277 /* Not using this loops handling inside OpenACC 'kernels' regions. */
8278 gcc_unreachable ();
8279 else if (is_oacc_kernels_decomposed_part (tgt))
8281 else
8282 gcc_unreachable ();
8284 /* In a parallel region, loops are implicitly INDEPENDENT. */
8285 if (!tgt || is_oacc_parallel_or_serial (tgt))
8286 tag |= OLF_INDEPENDENT;
8288 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8289 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8290 if (tgt && is_oacc_kernels_decomposed_part (tgt))
8292 gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
8293 gcc_assert (!(tag & OLF_AUTO));
8296 if (tag & OLF_TILE)
8297 /* Tiling could use all 3 levels. */
8298 levels = 3;
8299 else
8301 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8302 Ensure at least one level, or 2 for possible auto
8303 partitioning */
8304 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
8305 << OLF_DIM_BASE) | OLF_SEQ));
8307 if (levels < 1u + maybe_auto)
8308 levels = 1u + maybe_auto;
8311 args.quick_push (build_int_cst (integer_type_node, levels));
8312 args.quick_push (build_int_cst (integer_type_node, tag));
8313 if (gang_static)
8314 args.quick_push (gang_static);
8316 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
8317 gimple_set_location (call, loc);
8318 gimple_set_lhs (call, ddvar);
8319 gimple_seq_add_stmt (seq, call);
8321 return levels;
8324 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
8325 partitioning level of the enclosed region. */
8327 static void
8328 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8329 tree tofollow, gimple_seq *seq)
8331 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8332 : IFN_UNIQUE_OACC_TAIL_MARK);
8333 tree marker = build_int_cst (integer_type_node, marker_kind);
8334 int nargs = 2 + (tofollow != NULL_TREE);
8335 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8336 marker, ddvar, tofollow);
8337 gimple_set_location (call, loc);
8338 gimple_set_lhs (call, ddvar);
8339 gimple_seq_add_stmt (seq, call);
8342 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8343 the loop clauses, from which we extract reductions. Initialize
8344 HEAD and TAIL. */
8346 static void
8347 lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
8348 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
8350 bool inner = false;
8351 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
8352 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
8354 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
8356 if (private_marker)
8358 gimple_set_location (private_marker, loc);
8359 gimple_call_set_lhs (private_marker, ddvar);
8360 gimple_call_set_arg (private_marker, 1, ddvar);
8363 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
8364 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
8366 gcc_assert (count);
8367 for (unsigned done = 1; count; count--, done++)
8369 gimple_seq fork_seq = NULL;
8370 gimple_seq join_seq = NULL;
8372 tree place = build_int_cst (integer_type_node, -1);
8373 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
8374 fork_kind, ddvar, place);
8375 gimple_set_location (fork, loc);
8376 gimple_set_lhs (fork, ddvar);
8378 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
8379 join_kind, ddvar, place);
8380 gimple_set_location (join, loc);
8381 gimple_set_lhs (join, ddvar);
8383 /* Mark the beginning of this level sequence. */
8384 if (inner)
8385 lower_oacc_loop_marker (loc, ddvar, true,
8386 build_int_cst (integer_type_node, count),
8387 &fork_seq);
8388 lower_oacc_loop_marker (loc, ddvar, false,
8389 build_int_cst (integer_type_node, done),
8390 &join_seq);
8392 lower_oacc_reductions (loc, clauses, place, inner,
8393 fork, (count == 1) ? private_marker : NULL,
8394 join, &fork_seq, &join_seq, ctx);
8396 /* Append this level to head. */
8397 gimple_seq_add_seq (head, fork_seq);
8398 /* Prepend it to tail. */
8399 gimple_seq_add_seq (&join_seq, *tail);
8400 *tail = join_seq;
8402 inner = true;
8405 /* Mark the end of the sequence. */
8406 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
8407 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
8410 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8411 catch handler and return it. This prevents programs from violating the
8412 structured block semantics with throws. */
8414 static gimple_seq
8415 maybe_catch_exception (gimple_seq body)
8417 gimple *g;
8418 tree decl;
8420 if (!flag_exceptions)
8421 return body;
8423 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8424 decl = lang_hooks.eh_protect_cleanup_actions ();
8425 else
8426 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8428 g = gimple_build_eh_must_not_throw (decl);
8429 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8430 GIMPLE_TRY_CATCH);
8432 return gimple_seq_alloc_with_stmt (g);
8436 /* Routines to lower OMP directives into OMP-GIMPLE. */
8438 /* If ctx is a worksharing context inside of a cancellable parallel
8439 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8440 and conditional branch to parallel's cancel_label to handle
8441 cancellation in the implicit barrier. */
8443 static void
8444 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8445 gimple_seq *body)
8447 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8448 if (gimple_omp_return_nowait_p (omp_return))
8449 return;
8450 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8451 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8452 && outer->cancellable)
8454 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8455 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8456 tree lhs = create_tmp_var (c_bool_type);
8457 gimple_omp_return_set_lhs (omp_return, lhs);
8458 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8459 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8460 fold_convert (c_bool_type,
8461 boolean_false_node),
8462 outer->cancel_label, fallthru_label);
8463 gimple_seq_add_stmt (body, g);
8464 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8466 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
8467 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
8468 return;
8471 /* Find the first task_reduction or reduction clause or return NULL
8472 if there are none. */
8474 static inline tree
8475 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8476 enum omp_clause_code ccode)
8478 while (1)
8480 clauses = omp_find_clause (clauses, ccode);
8481 if (clauses == NULL_TREE)
8482 return NULL_TREE;
8483 if (ccode != OMP_CLAUSE_REDUCTION
8484 || code == OMP_TASKLOOP
8485 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8486 return clauses;
8487 clauses = OMP_CLAUSE_CHAIN (clauses);
8491 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8492 gimple_seq *, gimple_seq *);
8494 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8495 CTX is the enclosing OMP context for the current statement. */
8497 static void
8498 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8500 tree block, control;
8501 gimple_stmt_iterator tgsi;
8502 gomp_sections *stmt;
8503 gimple *t;
8504 gbind *new_stmt, *bind;
8505 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8507 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
8509 push_gimplify_context ();
8511 dlist = NULL;
8512 ilist = NULL;
8514 tree rclauses
8515 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
8516 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
8517 tree rtmp = NULL_TREE;
8518 if (rclauses)
8520 tree type = build_pointer_type (pointer_sized_int_node);
8521 tree temp = create_tmp_var (type);
8522 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8523 OMP_CLAUSE_DECL (c) = temp;
8524 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
8525 gimple_omp_sections_set_clauses (stmt, c);
8526 lower_omp_task_reductions (ctx, OMP_SECTIONS,
8527 gimple_omp_sections_clauses (stmt),
8528 &ilist, &tred_dlist);
8529 rclauses = c;
8530 rtmp = make_ssa_name (type);
8531 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
8534 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
8535 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
8537 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
8538 &ilist, &dlist, ctx, NULL);
8540 control = create_tmp_var (unsigned_type_node, ".section");
8541 gimple_omp_sections_set_control (stmt, control);
8543 new_body = gimple_omp_body (stmt);
8544 gimple_omp_set_body (stmt, NULL);
8545 tgsi = gsi_start (new_body);
8546 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
8548 omp_context *sctx;
8549 gimple *sec_start;
8551 sec_start = gsi_stmt (tgsi);
8552 sctx = maybe_lookup_ctx (sec_start);
8553 gcc_assert (sctx);
8555 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8556 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8557 GSI_CONTINUE_LINKING);
8558 gimple_omp_set_body (sec_start, NULL);
8560 if (gsi_one_before_end_p (tgsi))
8562 gimple_seq l = NULL;
8563 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8564 &ilist, &l, &clist, ctx);
8565 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8566 gimple_omp_section_set_last (sec_start);
8569 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8570 GSI_CONTINUE_LINKING);
8573 block = make_node (BLOCK);
8574 bind = gimple_build_bind (NULL, new_body, block);
8576 olist = NULL;
8577 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8578 &clist, ctx);
8579 if (clist)
8581 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8582 gcall *g = gimple_build_call (fndecl, 0);
8583 gimple_seq_add_stmt (&olist, g);
8584 gimple_seq_add_seq (&olist, clist);
8585 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8586 g = gimple_build_call (fndecl, 0);
8587 gimple_seq_add_stmt (&olist, g);
8590 block = make_node (BLOCK);
8591 new_stmt = gimple_build_bind (NULL, NULL, block);
8592 gsi_replace (gsi_p, new_stmt, true);
8594 pop_gimplify_context (new_stmt);
8595 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8596 BLOCK_VARS (block) = gimple_bind_vars (bind);
8597 if (BLOCK_VARS (block))
8598 TREE_USED (block) = 1;
8600 new_body = NULL;
8601 gimple_seq_add_seq (&new_body, ilist);
8602 gimple_seq_add_stmt (&new_body, stmt);
8603 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8604 gimple_seq_add_stmt (&new_body, bind);
8606 t = gimple_build_omp_continue (control, control);
8607 gimple_seq_add_stmt (&new_body, t);
8609 gimple_seq_add_seq (&new_body, olist);
8610 if (ctx->cancellable)
8611 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8612 gimple_seq_add_seq (&new_body, dlist);
8614 new_body = maybe_catch_exception (new_body);
8616 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8617 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8618 t = gimple_build_omp_return (nowait);
8619 gimple_seq_add_stmt (&new_body, t);
8620 gimple_seq_add_seq (&new_body, tred_dlist);
8621 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
8623 if (rclauses)
8624 OMP_CLAUSE_DECL (rclauses) = rtmp;
8626 gimple_bind_set_body (new_stmt, new_body);
8630 /* A subroutine of lower_omp_single. Expand the simple form of
8631 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8633 if (GOMP_single_start ())
8634 BODY;
8635 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8637 FIXME. It may be better to delay expanding the logic of this until
8638 pass_expand_omp. The expanded logic may make the job more difficult
8639 to a synchronization analysis pass. */
8641 static void
8642 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8644 location_t loc = gimple_location (single_stmt);
8645 tree tlabel = create_artificial_label (loc);
8646 tree flabel = create_artificial_label (loc);
8647 gimple *call, *cond;
8648 tree lhs, decl;
8650 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8651 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8652 call = gimple_build_call (decl, 0);
8653 gimple_call_set_lhs (call, lhs);
8654 gimple_seq_add_stmt (pre_p, call);
8656 cond = gimple_build_cond (EQ_EXPR, lhs,
8657 fold_convert_loc (loc, TREE_TYPE (lhs),
8658 boolean_true_node),
8659 tlabel, flabel);
8660 gimple_seq_add_stmt (pre_p, cond);
8661 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8662 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8663 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8667 /* A subroutine of lower_omp_single. Expand the simple form of
8668 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8670 #pragma omp single copyprivate (a, b, c)
8672 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8675 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8677 BODY;
8678 copyout.a = a;
8679 copyout.b = b;
8680 copyout.c = c;
8681 GOMP_single_copy_end (&copyout);
8683 else
8685 a = copyout_p->a;
8686 b = copyout_p->b;
8687 c = copyout_p->c;
8689 GOMP_barrier ();
8692 FIXME. It may be better to delay expanding the logic of this until
8693 pass_expand_omp. The expanded logic may make the job more difficult
8694 to a synchronization analysis pass. */
8696 static void
8697 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8698 omp_context *ctx)
8700 tree ptr_type, t, l0, l1, l2, bfn_decl;
8701 gimple_seq copyin_seq;
8702 location_t loc = gimple_location (single_stmt);
8704 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8706 ptr_type = build_pointer_type (ctx->record_type);
8707 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8709 l0 = create_artificial_label (loc);
8710 l1 = create_artificial_label (loc);
8711 l2 = create_artificial_label (loc);
8713 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8714 t = build_call_expr_loc (loc, bfn_decl, 0);
8715 t = fold_convert_loc (loc, ptr_type, t);
8716 gimplify_assign (ctx->receiver_decl, t, pre_p);
8718 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8719 build_int_cst (ptr_type, 0));
8720 t = build3 (COND_EXPR, void_type_node, t,
8721 build_and_jump (&l0), build_and_jump (&l1));
8722 gimplify_and_add (t, pre_p);
8724 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8726 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8728 copyin_seq = NULL;
8729 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8730 &copyin_seq, ctx);
8732 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8733 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8734 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8735 gimplify_and_add (t, pre_p);
8737 t = build_and_jump (&l2);
8738 gimplify_and_add (t, pre_p);
8740 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8742 gimple_seq_add_seq (pre_p, copyin_seq);
8744 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8748 /* Expand code for an OpenMP single directive. */
8750 static void
8751 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8753 tree block;
8754 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8755 gbind *bind;
8756 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8758 push_gimplify_context ();
8760 block = make_node (BLOCK);
8761 bind = gimple_build_bind (NULL, NULL, block);
8762 gsi_replace (gsi_p, bind, true);
8763 bind_body = NULL;
8764 dlist = NULL;
8765 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8766 &bind_body, &dlist, ctx, NULL);
8767 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8769 gimple_seq_add_stmt (&bind_body, single_stmt);
8771 if (ctx->record_type)
8772 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8773 else
8774 lower_omp_single_simple (single_stmt, &bind_body);
8776 gimple_omp_set_body (single_stmt, NULL);
8778 gimple_seq_add_seq (&bind_body, dlist);
8780 bind_body = maybe_catch_exception (bind_body);
8782 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8783 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8784 gimple *g = gimple_build_omp_return (nowait);
8785 gimple_seq_add_stmt (&bind_body_tail, g);
8786 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8787 if (ctx->record_type)
8789 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8790 tree clobber = build_clobber (ctx->record_type);
8791 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8792 clobber), GSI_SAME_STMT);
8794 gimple_seq_add_seq (&bind_body, bind_body_tail);
8795 gimple_bind_set_body (bind, bind_body);
8797 pop_gimplify_context (bind);
8799 gimple_bind_append_vars (bind, ctx->block_vars);
8800 BLOCK_VARS (block) = ctx->block_vars;
8801 if (BLOCK_VARS (block))
8802 TREE_USED (block) = 1;
8806 /* Lower code for an OMP scope directive. */
8808 static void
8809 lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8811 tree block;
8812 gimple *scope_stmt = gsi_stmt (*gsi_p);
8813 gbind *bind;
8814 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8815 gimple_seq tred_dlist = NULL;
8817 push_gimplify_context ();
8819 block = make_node (BLOCK);
8820 bind = gimple_build_bind (NULL, NULL, block);
8821 gsi_replace (gsi_p, bind, true);
8822 bind_body = NULL;
8823 dlist = NULL;
8825 tree rclauses
8826 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
8827 OMP_SCOPE, OMP_CLAUSE_REDUCTION);
8828 if (rclauses)
8830 tree type = build_pointer_type (pointer_sized_int_node);
8831 tree temp = create_tmp_var (type);
8832 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8833 OMP_CLAUSE_DECL (c) = temp;
8834 OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
8835 gimple_omp_scope_set_clauses (scope_stmt, c);
8836 lower_omp_task_reductions (ctx, OMP_SCOPE,
8837 gimple_omp_scope_clauses (scope_stmt),
8838 &bind_body, &tred_dlist);
8839 rclauses = c;
8840 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
8841 gimple *stmt = gimple_build_call (fndecl, 1, temp);
8842 gimple_seq_add_stmt (&bind_body, stmt);
8845 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
8846 &bind_body, &dlist, ctx, NULL);
8847 lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);
8849 gimple_seq_add_stmt (&bind_body, scope_stmt);
8851 gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));
8853 gimple_omp_set_body (scope_stmt, NULL);
8855 gimple_seq clist = NULL;
8856 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
8857 &bind_body, &clist, ctx);
8858 if (clist)
8860 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8861 gcall *g = gimple_build_call (fndecl, 0);
8862 gimple_seq_add_stmt (&bind_body, g);
8863 gimple_seq_add_seq (&bind_body, clist);
8864 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8865 g = gimple_build_call (fndecl, 0);
8866 gimple_seq_add_stmt (&bind_body, g);
8869 gimple_seq_add_seq (&bind_body, dlist);
8871 bind_body = maybe_catch_exception (bind_body);
8873 bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
8874 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8875 gimple *g = gimple_build_omp_return (nowait);
8876 gimple_seq_add_stmt (&bind_body_tail, g);
8877 gimple_seq_add_seq (&bind_body_tail, tred_dlist);
8878 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8879 if (ctx->record_type)
8881 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8882 tree clobber = build_clobber (ctx->record_type);
8883 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8884 clobber), GSI_SAME_STMT);
8886 gimple_seq_add_seq (&bind_body, bind_body_tail);
8888 gimple_bind_set_body (bind, bind_body);
8890 pop_gimplify_context (bind);
8892 gimple_bind_append_vars (bind, ctx->block_vars);
8893 BLOCK_VARS (block) = ctx->block_vars;
8894 if (BLOCK_VARS (block))
8895 TREE_USED (block) = 1;
8897 /* Expand code for an OpenMP master or masked directive. */
8899 static void
8900 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8902 tree block, lab = NULL, x, bfn_decl;
8903 gimple *stmt = gsi_stmt (*gsi_p);
8904 gbind *bind;
8905 location_t loc = gimple_location (stmt);
8906 gimple_seq tseq;
8907 tree filter = integer_zero_node;
8909 push_gimplify_context ();
8911 if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
8913 filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
8914 OMP_CLAUSE_FILTER);
8915 if (filter)
8916 filter = fold_convert (integer_type_node,
8917 OMP_CLAUSE_FILTER_EXPR (filter));
8918 else
8919 filter = integer_zero_node;
8921 block = make_node (BLOCK);
8922 bind = gimple_build_bind (NULL, NULL, block);
8923 gsi_replace (gsi_p, bind, true);
8924 gimple_bind_add_stmt (bind, stmt);
8926 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8927 x = build_call_expr_loc (loc, bfn_decl, 0);
8928 x = build2 (EQ_EXPR, boolean_type_node, x, filter);
8929 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8930 tseq = NULL;
8931 gimplify_and_add (x, &tseq);
8932 gimple_bind_add_seq (bind, tseq);
8934 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8935 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8936 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8937 gimple_omp_set_body (stmt, NULL);
8939 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8941 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8943 pop_gimplify_context (bind);
8945 gimple_bind_append_vars (bind, ctx->block_vars);
8946 BLOCK_VARS (block) = ctx->block_vars;
8949 /* Helper function for lower_omp_task_reductions. For a specific PASS
8950 find out the current clause it should be processed, or return false
8951 if all have been processed already. */
8953 static inline bool
8954 omp_task_reduction_iterate (int pass, enum tree_code code,
8955 enum omp_clause_code ccode, tree *c, tree *decl,
8956 tree *type, tree *next)
8958 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8960 if (ccode == OMP_CLAUSE_REDUCTION
8961 && code != OMP_TASKLOOP
8962 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8963 continue;
8964 *decl = OMP_CLAUSE_DECL (*c);
8965 *type = TREE_TYPE (*decl);
8966 if (TREE_CODE (*decl) == MEM_REF)
8968 if (pass != 1)
8969 continue;
8971 else
8973 if (omp_privatize_by_reference (*decl))
8974 *type = TREE_TYPE (*type);
8975 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8976 continue;
8978 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8979 return true;
8981 *decl = NULL_TREE;
8982 *type = NULL_TREE;
8983 *next = NULL_TREE;
8984 return false;
8987 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8988 OMP_TASKGROUP only with task modifier). Register mapping of those in
8989 START sequence and reducing them and unregister them in the END sequence. */
8991 static void
8992 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8993 gimple_seq *start, gimple_seq *end)
8995 enum omp_clause_code ccode
8996 = (code == OMP_TASKGROUP
8997 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8998 tree cancellable = NULL_TREE;
8999 clauses = omp_task_reductions_find_first (clauses, code, ccode);
9000 if (clauses == NULL_TREE)
9001 return;
9002 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9004 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
9005 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
9006 && outer->cancellable)
9008 cancellable = error_mark_node;
9009 break;
9011 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
9012 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
9013 break;
9015 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
9016 tree *last = &TYPE_FIELDS (record_type);
9017 unsigned cnt = 0;
9018 if (cancellable)
9020 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9021 ptr_type_node);
9022 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9023 integer_type_node);
9024 *last = field;
9025 DECL_CHAIN (field) = ifield;
9026 last = &DECL_CHAIN (ifield);
9027 DECL_CONTEXT (field) = record_type;
9028 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9029 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9030 DECL_CONTEXT (ifield) = record_type;
9031 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
9032 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
9034 for (int pass = 0; pass < 2; pass++)
9036 tree decl, type, next;
9037 for (tree c = clauses;
9038 omp_task_reduction_iterate (pass, code, ccode,
9039 &c, &decl, &type, &next); c = next)
9041 ++cnt;
9042 tree new_type = type;
9043 if (ctx->outer)
9044 new_type = remap_type (type, &ctx->outer->cb);
9045 tree field
9046 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
9047 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
9048 new_type);
9049 if (DECL_P (decl) && type == TREE_TYPE (decl))
9051 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
9052 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
9053 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
9055 else
9056 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
9057 DECL_CONTEXT (field) = record_type;
9058 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9059 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9060 *last = field;
9061 last = &DECL_CHAIN (field);
9062 tree bfield
9063 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
9064 boolean_type_node);
9065 DECL_CONTEXT (bfield) = record_type;
9066 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
9067 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
9068 *last = bfield;
9069 last = &DECL_CHAIN (bfield);
9072 *last = NULL_TREE;
9073 layout_type (record_type);
9075 /* Build up an array which registers with the runtime all the reductions
9076 and deregisters them at the end. Format documented in libgomp/task.c. */
9077 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
9078 tree avar = create_tmp_var_raw (atype);
9079 gimple_add_tmp_var (avar);
9080 TREE_ADDRESSABLE (avar) = 1;
9081 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
9082 NULL_TREE, NULL_TREE);
9083 tree t = build_int_cst (pointer_sized_int_node, cnt);
9084 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9085 gimple_seq seq = NULL;
9086 tree sz = fold_convert (pointer_sized_int_node,
9087 TYPE_SIZE_UNIT (record_type));
9088 int cachesz = 64;
9089 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
9090 build_int_cst (pointer_sized_int_node, cachesz - 1));
9091 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
9092 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
9093 ctx->task_reductions.create (1 + cnt);
9094 ctx->task_reduction_map = new hash_map<tree, unsigned>;
9095 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
9096 ? sz : NULL_TREE);
9097 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
9098 gimple_seq_add_seq (start, seq);
9099 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
9100 NULL_TREE, NULL_TREE);
9101 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
9102 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9103 NULL_TREE, NULL_TREE);
9104 t = build_int_cst (pointer_sized_int_node,
9105 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
9106 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9107 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
9108 NULL_TREE, NULL_TREE);
9109 t = build_int_cst (pointer_sized_int_node, -1);
9110 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9111 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
9112 NULL_TREE, NULL_TREE);
9113 t = build_int_cst (pointer_sized_int_node, 0);
9114 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9116 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
9117 and for each task reduction checks a bool right after the private variable
9118 within that thread's chunk; if the bool is clear, it hasn't been
9119 initialized and thus isn't going to be reduced nor destructed, otherwise
9120 reduce and destruct it. */
9121 tree idx = create_tmp_var (size_type_node);
9122 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
9123 tree num_thr_sz = create_tmp_var (size_type_node);
9124 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9125 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9126 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
9127 gimple *g;
9128 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9130 /* For worksharing constructs or scope, only perform it in the master
9131 thread, with the exception of cancelled implicit barriers - then only
9132 handle the current thread. */
9133 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9134 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9135 tree thr_num = create_tmp_var (integer_type_node);
9136 g = gimple_build_call (t, 0);
9137 gimple_call_set_lhs (g, thr_num);
9138 gimple_seq_add_stmt (end, g);
9139 if (cancellable)
9141 tree c;
9142 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9143 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9144 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9145 if (code == OMP_FOR)
9146 c = gimple_omp_for_clauses (ctx->stmt);
9147 else if (code == OMP_SECTIONS)
9148 c = gimple_omp_sections_clauses (ctx->stmt);
9149 else /* if (code == OMP_SCOPE) */
9150 c = gimple_omp_scope_clauses (ctx->stmt);
9151 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
9152 cancellable = c;
9153 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
9154 lab5, lab6);
9155 gimple_seq_add_stmt (end, g);
9156 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9157 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
9158 gimple_seq_add_stmt (end, g);
9159 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
9160 build_one_cst (TREE_TYPE (idx)));
9161 gimple_seq_add_stmt (end, g);
9162 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
9163 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9165 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
9166 gimple_seq_add_stmt (end, g);
9167 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9169 if (code != OMP_PARALLEL)
9171 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9172 tree num_thr = create_tmp_var (integer_type_node);
9173 g = gimple_build_call (t, 0);
9174 gimple_call_set_lhs (g, num_thr);
9175 gimple_seq_add_stmt (end, g);
9176 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
9177 gimple_seq_add_stmt (end, g);
9178 if (cancellable)
9179 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9181 else
9183 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
9184 OMP_CLAUSE__REDUCTEMP_);
9185 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
9186 t = fold_convert (size_type_node, t);
9187 gimplify_assign (num_thr_sz, t, end);
9189 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9190 NULL_TREE, NULL_TREE);
9191 tree data = create_tmp_var (pointer_sized_int_node);
9192 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
9193 if (code == OMP_TASKLOOP)
9195 lab7 = create_artificial_label (UNKNOWN_LOCATION);
9196 g = gimple_build_cond (NE_EXPR, data,
9197 build_zero_cst (pointer_sized_int_node),
9198 lab1, lab7);
9199 gimple_seq_add_stmt (end, g);
9201 gimple_seq_add_stmt (end, gimple_build_label (lab1));
9202 tree ptr;
9203 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
9204 ptr = create_tmp_var (build_pointer_type (record_type));
9205 else
9206 ptr = create_tmp_var (ptr_type_node);
9207 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
9209 tree field = TYPE_FIELDS (record_type);
9210 cnt = 0;
9211 if (cancellable)
9212 field = DECL_CHAIN (DECL_CHAIN (field));
9213 for (int pass = 0; pass < 2; pass++)
9215 tree decl, type, next;
9216 for (tree c = clauses;
9217 omp_task_reduction_iterate (pass, code, ccode,
9218 &c, &decl, &type, &next); c = next)
9220 tree var = decl, ref;
9221 if (TREE_CODE (decl) == MEM_REF)
9223 var = TREE_OPERAND (var, 0);
9224 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
9225 var = TREE_OPERAND (var, 0);
9226 tree v = var;
9227 if (TREE_CODE (var) == ADDR_EXPR)
9228 var = TREE_OPERAND (var, 0);
9229 else if (INDIRECT_REF_P (var))
9230 var = TREE_OPERAND (var, 0);
9231 tree orig_var = var;
9232 if (is_variable_sized (var))
9234 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
9235 var = DECL_VALUE_EXPR (var);
9236 gcc_assert (INDIRECT_REF_P (var));
9237 var = TREE_OPERAND (var, 0);
9238 gcc_assert (DECL_P (var));
9240 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9241 if (orig_var != var)
9242 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
9243 else if (TREE_CODE (v) == ADDR_EXPR)
9244 t = build_fold_addr_expr (t);
9245 else if (INDIRECT_REF_P (v))
9246 t = build_fold_indirect_ref (t);
9247 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
9249 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
9250 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
9251 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
9253 if (!integer_zerop (TREE_OPERAND (decl, 1)))
9254 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
9255 fold_convert (size_type_node,
9256 TREE_OPERAND (decl, 1)));
9258 else
9260 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9261 if (!omp_privatize_by_reference (decl))
9262 t = build_fold_addr_expr (t);
9264 t = fold_convert (pointer_sized_int_node, t);
9265 seq = NULL;
9266 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9267 gimple_seq_add_seq (start, seq);
9268 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9269 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9270 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9271 t = unshare_expr (byte_position (field));
9272 t = fold_convert (pointer_sized_int_node, t);
9273 ctx->task_reduction_map->put (c, cnt);
9274 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
9275 ? t : NULL_TREE);
9276 seq = NULL;
9277 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9278 gimple_seq_add_seq (start, seq);
9279 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9280 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
9281 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9283 tree bfield = DECL_CHAIN (field);
9284 tree cond;
9285 if (code == OMP_PARALLEL
9286 || code == OMP_FOR
9287 || code == OMP_SECTIONS
9288 || code == OMP_SCOPE)
9289 /* In parallel, worksharing or scope all threads unconditionally
9290 initialize all their task reduction private variables. */
9291 cond = boolean_true_node;
9292 else if (TREE_TYPE (ptr) == ptr_type_node)
9294 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9295 unshare_expr (byte_position (bfield)));
9296 seq = NULL;
9297 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
9298 gimple_seq_add_seq (end, seq);
9299 tree pbool = build_pointer_type (TREE_TYPE (bfield));
9300 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
9301 build_int_cst (pbool, 0));
9303 else
9304 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
9305 build_simple_mem_ref (ptr), bfield, NULL_TREE);
9306 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9307 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9308 tree condv = create_tmp_var (boolean_type_node);
9309 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
9310 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
9311 lab3, lab4);
9312 gimple_seq_add_stmt (end, g);
9313 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9314 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
9316 /* If this reduction doesn't need destruction and parallel
9317 has been cancelled, there is nothing to do for this
9318 reduction, so jump around the merge operation. */
9319 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9320 g = gimple_build_cond (NE_EXPR, cancellable,
9321 build_zero_cst (TREE_TYPE (cancellable)),
9322 lab4, lab5);
9323 gimple_seq_add_stmt (end, g);
9324 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9327 tree new_var;
9328 if (TREE_TYPE (ptr) == ptr_type_node)
9330 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9331 unshare_expr (byte_position (field)));
9332 seq = NULL;
9333 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9334 gimple_seq_add_seq (end, seq);
9335 tree pbool = build_pointer_type (TREE_TYPE (field));
9336 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9337 build_int_cst (pbool, 0));
9339 else
9340 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9341 build_simple_mem_ref (ptr), field, NULL_TREE);
9343 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9344 if (TREE_CODE (decl) != MEM_REF
9345 && omp_privatize_by_reference (decl))
9346 ref = build_simple_mem_ref (ref);
9347 /* reduction(-:var) sums up the partial results, so it acts
9348 identically to reduction(+:var). */
9349 if (rcode == MINUS_EXPR)
9350 rcode = PLUS_EXPR;
9351 if (TREE_CODE (decl) == MEM_REF)
9353 tree type = TREE_TYPE (new_var);
9354 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9355 tree i = create_tmp_var (TREE_TYPE (v));
9356 tree ptype = build_pointer_type (TREE_TYPE (type));
9357 if (DECL_P (v))
9359 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9360 tree vv = create_tmp_var (TREE_TYPE (v));
9361 gimplify_assign (vv, v, start);
9362 v = vv;
9364 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9365 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9366 new_var = build_fold_addr_expr (new_var);
9367 new_var = fold_convert (ptype, new_var);
9368 ref = fold_convert (ptype, ref);
9369 tree m = create_tmp_var (ptype);
9370 gimplify_assign (m, new_var, end);
9371 new_var = m;
9372 m = create_tmp_var (ptype);
9373 gimplify_assign (m, ref, end);
9374 ref = m;
9375 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9376 tree body = create_artificial_label (UNKNOWN_LOCATION);
9377 tree endl = create_artificial_label (UNKNOWN_LOCATION);
9378 gimple_seq_add_stmt (end, gimple_build_label (body));
9379 tree priv = build_simple_mem_ref (new_var);
9380 tree out = build_simple_mem_ref (ref);
9381 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9383 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9384 tree decl_placeholder
9385 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9386 tree lab6 = NULL_TREE;
9387 if (cancellable)
9389 /* If this reduction needs destruction and parallel
9390 has been cancelled, jump around the merge operation
9391 to the destruction. */
9392 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9393 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9394 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9395 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9396 lab6, lab5);
9397 gimple_seq_add_stmt (end, g);
9398 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9400 SET_DECL_VALUE_EXPR (placeholder, out);
9401 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9402 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9403 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9404 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9405 gimple_seq_add_seq (end,
9406 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9407 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9408 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9410 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9411 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9413 if (cancellable)
9414 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9415 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9416 if (x)
9418 gimple_seq tseq = NULL;
9419 gimplify_stmt (&x, &tseq);
9420 gimple_seq_add_seq (end, tseq);
9423 else
9425 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9426 out = unshare_expr (out);
9427 gimplify_assign (out, x, end);
9429 gimple *g
9430 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9431 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9432 gimple_seq_add_stmt (end, g);
9433 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9434 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9435 gimple_seq_add_stmt (end, g);
9436 g = gimple_build_assign (i, PLUS_EXPR, i,
9437 build_int_cst (TREE_TYPE (i), 1));
9438 gimple_seq_add_stmt (end, g);
9439 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9440 gimple_seq_add_stmt (end, g);
9441 gimple_seq_add_stmt (end, gimple_build_label (endl));
9443 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9445 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9446 tree oldv = NULL_TREE;
9447 tree lab6 = NULL_TREE;
9448 if (cancellable)
9450 /* If this reduction needs destruction and parallel
9451 has been cancelled, jump around the merge operation
9452 to the destruction. */
9453 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9454 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9455 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9456 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9457 lab6, lab5);
9458 gimple_seq_add_stmt (end, g);
9459 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9461 if (omp_privatize_by_reference (decl)
9462 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9463 TREE_TYPE (ref)))
9464 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9465 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9466 tree refv = create_tmp_var (TREE_TYPE (ref));
9467 gimplify_assign (refv, ref, end);
9468 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9469 SET_DECL_VALUE_EXPR (placeholder, ref);
9470 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9471 tree d = maybe_lookup_decl (decl, ctx);
9472 gcc_assert (d);
9473 if (DECL_HAS_VALUE_EXPR_P (d))
9474 oldv = DECL_VALUE_EXPR (d);
9475 if (omp_privatize_by_reference (var))
9477 tree v = fold_convert (TREE_TYPE (d),
9478 build_fold_addr_expr (new_var));
9479 SET_DECL_VALUE_EXPR (d, v);
9481 else
9482 SET_DECL_VALUE_EXPR (d, new_var);
9483 DECL_HAS_VALUE_EXPR_P (d) = 1;
9484 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9485 if (oldv)
9486 SET_DECL_VALUE_EXPR (d, oldv);
9487 else
9489 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9490 DECL_HAS_VALUE_EXPR_P (d) = 0;
9492 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9493 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9494 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9495 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9496 if (cancellable)
9497 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9498 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9499 if (x)
9501 gimple_seq tseq = NULL;
9502 gimplify_stmt (&x, &tseq);
9503 gimple_seq_add_seq (end, tseq);
9506 else
9508 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9509 ref = unshare_expr (ref);
9510 gimplify_assign (ref, x, end);
9512 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9513 ++cnt;
9514 field = DECL_CHAIN (bfield);
9518 if (code == OMP_TASKGROUP)
9520 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9521 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9522 gimple_seq_add_stmt (start, g);
9524 else
9526 tree c;
9527 if (code == OMP_FOR)
9528 c = gimple_omp_for_clauses (ctx->stmt);
9529 else if (code == OMP_SECTIONS)
9530 c = gimple_omp_sections_clauses (ctx->stmt);
9531 else if (code == OMP_SCOPE)
9532 c = gimple_omp_scope_clauses (ctx->stmt);
9533 else
9534 c = gimple_omp_taskreg_clauses (ctx->stmt);
9535 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9536 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9537 build_fold_addr_expr (avar));
9538 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9541 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9542 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9543 size_one_node));
9544 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9545 gimple_seq_add_stmt (end, g);
9546 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9547 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9549 enum built_in_function bfn
9550 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9551 t = builtin_decl_explicit (bfn);
9552 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9553 tree arg;
9554 if (cancellable)
9556 arg = create_tmp_var (c_bool_type);
9557 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9558 cancellable));
9560 else
9561 arg = build_int_cst (c_bool_type, 0);
9562 g = gimple_build_call (t, 1, arg);
9564 else
9566 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9567 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9569 gimple_seq_add_stmt (end, g);
9570 if (lab7)
9571 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9572 t = build_constructor (atype, NULL);
9573 TREE_THIS_VOLATILE (t) = 1;
9574 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9577 /* Expand code for an OpenMP taskgroup directive. */
9579 static void
9580 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9582 gimple *stmt = gsi_stmt (*gsi_p);
9583 gcall *x;
9584 gbind *bind;
9585 gimple_seq dseq = NULL;
9586 tree block = make_node (BLOCK);
9588 bind = gimple_build_bind (NULL, NULL, block);
9589 gsi_replace (gsi_p, bind, true);
9590 gimple_bind_add_stmt (bind, stmt);
9592 push_gimplify_context ();
9594 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9596 gimple_bind_add_stmt (bind, x);
9598 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9599 gimple_omp_taskgroup_clauses (stmt),
9600 gimple_bind_body_ptr (bind), &dseq);
9602 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9603 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9604 gimple_omp_set_body (stmt, NULL);
9606 gimple_bind_add_seq (bind, dseq);
9608 pop_gimplify_context (bind);
9610 gimple_bind_append_vars (bind, ctx->block_vars);
9611 BLOCK_VARS (block) = ctx->block_vars;
9615 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9617 static void
9618 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9619 omp_context *ctx)
9621 struct omp_for_data fd;
9622 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9623 return;
9625 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9626 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9627 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9628 if (!fd.ordered)
9629 return;
9631 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9632 tree c = gimple_omp_ordered_clauses (ord_stmt);
9633 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
9634 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
9636 /* Merge depend clauses from multiple adjacent
9637 #pragma omp ordered depend(sink:...) constructs
9638 into one #pragma omp ordered depend(sink:...), so that
9639 we can optimize them together. */
9640 gimple_stmt_iterator gsi = *gsi_p;
9641 gsi_next (&gsi);
9642 while (!gsi_end_p (gsi))
9644 gimple *stmt = gsi_stmt (gsi);
9645 if (is_gimple_debug (stmt)
9646 || gimple_code (stmt) == GIMPLE_NOP)
9648 gsi_next (&gsi);
9649 continue;
9651 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9652 break;
9653 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9654 c = gimple_omp_ordered_clauses (ord_stmt2);
9655 if (c == NULL_TREE
9656 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS
9657 || OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
9658 break;
9659 while (*list_p)
9660 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9661 *list_p = c;
9662 gsi_remove (&gsi, true);
9666 /* Canonicalize sink dependence clauses into one folded clause if
9667 possible.
9669 The basic algorithm is to create a sink vector whose first
9670 element is the GCD of all the first elements, and whose remaining
9671 elements are the minimum of the subsequent columns.
9673 We ignore dependence vectors whose first element is zero because
9674 such dependencies are known to be executed by the same thread.
9676 We take into account the direction of the loop, so a minimum
9677 becomes a maximum if the loop is iterating forwards. We also
9678 ignore sink clauses where the loop direction is unknown, or where
9679 the offsets are clearly invalid because they are not a multiple
9680 of the loop increment.
9682 For example:
9684 #pragma omp for ordered(2)
9685 for (i=0; i < N; ++i)
9686 for (j=0; j < M; ++j)
9688 #pragma omp ordered \
9689 depend(sink:i-8,j-2) \
9690 depend(sink:i,j-1) \ // Completely ignored because i+0.
9691 depend(sink:i-4,j-3) \
9692 depend(sink:i-6,j-4)
9693 #pragma omp ordered depend(source)
9696 Folded clause is:
9698 depend(sink:-gcd(8,4,6),-min(2,3,4))
9699 -or-
9700 depend(sink:-2,-2)
9703 /* FIXME: Computing GCD's where the first element is zero is
9704 non-trivial in the presence of collapsed loops. Do this later. */
9705 if (fd.collapse > 1)
9706 return;
9708 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9710 /* wide_int is not a POD so it must be default-constructed. */
9711 for (unsigned i = 0; i != 2 * len - 1; ++i)
9712 new (static_cast<void*>(folded_deps + i)) wide_int ();
9714 tree folded_dep = NULL_TREE;
9715 /* TRUE if the first dimension's offset is negative. */
9716 bool neg_offset_p = false;
9718 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9719 unsigned int i;
9720 while ((c = *list_p) != NULL)
9722 bool remove = false;
9724 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS);
9725 if (OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
9726 goto next_ordered_clause;
9728 tree vec;
9729 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9730 vec && TREE_CODE (vec) == TREE_LIST;
9731 vec = TREE_CHAIN (vec), ++i)
9733 gcc_assert (i < len);
9735 /* omp_extract_for_data has canonicalized the condition. */
9736 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9737 || fd.loops[i].cond_code == GT_EXPR);
9738 bool forward = fd.loops[i].cond_code == LT_EXPR;
9739 bool maybe_lexically_later = true;
9741 /* While the committee makes up its mind, bail if we have any
9742 non-constant steps. */
9743 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9744 goto lower_omp_ordered_ret;
9746 tree itype = TREE_TYPE (TREE_VALUE (vec));
9747 if (POINTER_TYPE_P (itype))
9748 itype = sizetype;
9749 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9750 TYPE_PRECISION (itype),
9751 TYPE_SIGN (itype));
9753 /* Ignore invalid offsets that are not multiples of the step. */
9754 if (!wi::multiple_of_p (wi::abs (offset),
9755 wi::abs (wi::to_wide (fd.loops[i].step)),
9756 UNSIGNED))
9758 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9759 "ignoring sink clause with offset that is not "
9760 "a multiple of the loop step");
9761 remove = true;
9762 goto next_ordered_clause;
9765 /* Calculate the first dimension. The first dimension of
9766 the folded dependency vector is the GCD of the first
9767 elements, while ignoring any first elements whose offset
9768 is 0. */
9769 if (i == 0)
9771 /* Ignore dependence vectors whose first dimension is 0. */
9772 if (offset == 0)
9774 remove = true;
9775 goto next_ordered_clause;
9777 else
9779 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9781 error_at (OMP_CLAUSE_LOCATION (c),
9782 "first offset must be in opposite direction "
9783 "of loop iterations");
9784 goto lower_omp_ordered_ret;
9786 if (forward)
9787 offset = -offset;
9788 neg_offset_p = forward;
9789 /* Initialize the first time around. */
9790 if (folded_dep == NULL_TREE)
9792 folded_dep = c;
9793 folded_deps[0] = offset;
9795 else
9796 folded_deps[0] = wi::gcd (folded_deps[0],
9797 offset, UNSIGNED);
9800 /* Calculate minimum for the remaining dimensions. */
9801 else
9803 folded_deps[len + i - 1] = offset;
9804 if (folded_dep == c)
9805 folded_deps[i] = offset;
9806 else if (maybe_lexically_later
9807 && !wi::eq_p (folded_deps[i], offset))
9809 if (forward ^ wi::gts_p (folded_deps[i], offset))
9811 unsigned int j;
9812 folded_dep = c;
9813 for (j = 1; j <= i; j++)
9814 folded_deps[j] = folded_deps[len + j - 1];
9816 else
9817 maybe_lexically_later = false;
9821 gcc_assert (i == len);
9823 remove = true;
9825 next_ordered_clause:
9826 if (remove)
9827 *list_p = OMP_CLAUSE_CHAIN (c);
9828 else
9829 list_p = &OMP_CLAUSE_CHAIN (c);
9832 if (folded_dep)
9834 if (neg_offset_p)
9835 folded_deps[0] = -folded_deps[0];
9837 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9838 if (POINTER_TYPE_P (itype))
9839 itype = sizetype;
9841 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9842 = wide_int_to_tree (itype, folded_deps[0]);
9843 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9844 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9847 lower_omp_ordered_ret:
9849 /* Ordered without clauses is #pragma omp threads, while we want
9850 a nop instead if we remove all clauses. */
9851 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9852 gsi_replace (gsi_p, gimple_build_nop (), true);
9856 /* Expand code for an OpenMP ordered directive. */
9858 static void
9859 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9861 tree block;
9862 gimple *stmt = gsi_stmt (*gsi_p), *g;
9863 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9864 gcall *x;
9865 gbind *bind;
9866 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9867 OMP_CLAUSE_SIMD);
9868 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9869 loop. */
9870 bool maybe_simt
9871 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9872 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9873 OMP_CLAUSE_THREADS);
9875 if (gimple_omp_ordered_standalone_p (ord_stmt))
9877 /* FIXME: This is needs to be moved to the expansion to verify various
9878 conditions only testable on cfg with dominators computed, and also
9879 all the depend clauses to be merged still might need to be available
9880 for the runtime checks. */
9881 if (0)
9882 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9883 return;
9886 push_gimplify_context ();
9888 block = make_node (BLOCK);
9889 bind = gimple_build_bind (NULL, NULL, block);
9890 gsi_replace (gsi_p, bind, true);
9891 gimple_bind_add_stmt (bind, stmt);
9893 if (simd)
9895 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9896 build_int_cst (NULL_TREE, threads));
9897 cfun->has_simduid_loops = true;
9899 else
9900 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9902 gimple_bind_add_stmt (bind, x);
9904 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9905 if (maybe_simt)
9907 counter = create_tmp_var (integer_type_node);
9908 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9909 gimple_call_set_lhs (g, counter);
9910 gimple_bind_add_stmt (bind, g);
9912 body = create_artificial_label (UNKNOWN_LOCATION);
9913 test = create_artificial_label (UNKNOWN_LOCATION);
9914 gimple_bind_add_stmt (bind, gimple_build_label (body));
9916 tree simt_pred = create_tmp_var (integer_type_node);
9917 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9918 gimple_call_set_lhs (g, simt_pred);
9919 gimple_bind_add_stmt (bind, g);
9921 tree t = create_artificial_label (UNKNOWN_LOCATION);
9922 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9923 gimple_bind_add_stmt (bind, g);
9925 gimple_bind_add_stmt (bind, gimple_build_label (t));
9927 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9928 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9929 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9930 gimple_omp_set_body (stmt, NULL);
9932 if (maybe_simt)
9934 gimple_bind_add_stmt (bind, gimple_build_label (test));
9935 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9936 gimple_bind_add_stmt (bind, g);
9938 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9939 tree nonneg = create_tmp_var (integer_type_node);
9940 gimple_seq tseq = NULL;
9941 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9942 gimple_bind_add_seq (bind, tseq);
9944 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9945 gimple_call_set_lhs (g, nonneg);
9946 gimple_bind_add_stmt (bind, g);
9948 tree end = create_artificial_label (UNKNOWN_LOCATION);
9949 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9950 gimple_bind_add_stmt (bind, g);
9952 gimple_bind_add_stmt (bind, gimple_build_label (end));
9954 if (simd)
9955 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9956 build_int_cst (NULL_TREE, threads));
9957 else
9958 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9960 gimple_bind_add_stmt (bind, x);
9962 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9964 pop_gimplify_context (bind);
9966 gimple_bind_append_vars (bind, ctx->block_vars);
9967 BLOCK_VARS (block) = gimple_bind_vars (bind);
9971 /* Expand code for an OpenMP scan directive and the structured block
9972 before the scan directive. */
9974 static void
9975 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9977 gimple *stmt = gsi_stmt (*gsi_p);
9978 bool has_clauses
9979 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9980 tree lane = NULL_TREE;
9981 gimple_seq before = NULL;
9982 omp_context *octx = ctx->outer;
9983 gcc_assert (octx);
9984 if (octx->scan_exclusive && !has_clauses)
9986 gimple_stmt_iterator gsi2 = *gsi_p;
9987 gsi_next (&gsi2);
9988 gimple *stmt2 = gsi_stmt (gsi2);
9989 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9990 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9991 the one with exclusive clause(s), comes first. */
9992 if (stmt2
9993 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9994 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9996 gsi_remove (gsi_p, false);
9997 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
9998 ctx = maybe_lookup_ctx (stmt2);
9999 gcc_assert (ctx);
10000 lower_omp_scan (gsi_p, ctx);
10001 return;
10005 bool input_phase = has_clauses ^ octx->scan_inclusive;
10006 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10007 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
10008 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10009 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
10010 && !gimple_omp_for_combined_p (octx->stmt));
10011 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
10012 if (is_for_simd && octx->for_simd_scan_phase)
10013 is_simd = false;
10014 if (is_simd)
10015 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
10016 OMP_CLAUSE__SIMDUID_))
10018 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
10019 lane = create_tmp_var (unsigned_type_node);
10020 tree t = build_int_cst (integer_type_node,
10021 input_phase ? 1
10022 : octx->scan_inclusive ? 2 : 3);
10023 gimple *g
10024 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
10025 gimple_call_set_lhs (g, lane);
10026 gimple_seq_add_stmt (&before, g);
10029 if (is_simd || is_for)
10031 for (tree c = gimple_omp_for_clauses (octx->stmt);
10032 c; c = OMP_CLAUSE_CHAIN (c))
10033 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10034 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10036 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10037 tree var = OMP_CLAUSE_DECL (c);
10038 tree new_var = lookup_decl (var, octx);
10039 tree val = new_var;
10040 tree var2 = NULL_TREE;
10041 tree var3 = NULL_TREE;
10042 tree var4 = NULL_TREE;
10043 tree lane0 = NULL_TREE;
10044 tree new_vard = new_var;
10045 if (omp_privatize_by_reference (var))
10047 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10048 val = new_var;
10050 if (DECL_HAS_VALUE_EXPR_P (new_vard))
10052 val = DECL_VALUE_EXPR (new_vard);
10053 if (new_vard != new_var)
10055 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
10056 val = TREE_OPERAND (val, 0);
10058 if (TREE_CODE (val) == ARRAY_REF
10059 && VAR_P (TREE_OPERAND (val, 0)))
10061 tree v = TREE_OPERAND (val, 0);
10062 if (lookup_attribute ("omp simd array",
10063 DECL_ATTRIBUTES (v)))
10065 val = unshare_expr (val);
10066 lane0 = TREE_OPERAND (val, 1);
10067 TREE_OPERAND (val, 1) = lane;
10068 var2 = lookup_decl (v, octx);
10069 if (octx->scan_exclusive)
10070 var4 = lookup_decl (var2, octx);
10071 if (input_phase
10072 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10073 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
10074 if (!input_phase)
10076 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
10077 var2, lane, NULL_TREE, NULL_TREE);
10078 TREE_THIS_NOTRAP (var2) = 1;
10079 if (octx->scan_exclusive)
10081 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
10082 var4, lane, NULL_TREE,
10083 NULL_TREE);
10084 TREE_THIS_NOTRAP (var4) = 1;
10087 else
10088 var2 = val;
10091 gcc_assert (var2);
10093 else
10095 var2 = build_outer_var_ref (var, octx);
10096 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10098 var3 = maybe_lookup_decl (new_vard, octx);
10099 if (var3 == new_vard || var3 == NULL_TREE)
10100 var3 = NULL_TREE;
10101 else if (is_simd && octx->scan_exclusive && !input_phase)
10103 var4 = maybe_lookup_decl (var3, octx);
10104 if (var4 == var3 || var4 == NULL_TREE)
10106 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
10108 var4 = var3;
10109 var3 = NULL_TREE;
10111 else
10112 var4 = NULL_TREE;
10116 if (is_simd
10117 && octx->scan_exclusive
10118 && !input_phase
10119 && var4 == NULL_TREE)
10120 var4 = create_tmp_var (TREE_TYPE (val));
10122 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10124 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10125 if (input_phase)
10127 if (var3)
10129 /* If we've added a separate identity element
10130 variable, copy it over into val. */
10131 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
10132 var3);
10133 gimplify_and_add (x, &before);
10135 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10137 /* Otherwise, assign to it the identity element. */
10138 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10139 if (is_for)
10140 tseq = copy_gimple_seq_and_replace_locals (tseq);
10141 tree ref = build_outer_var_ref (var, octx);
10142 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10143 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10144 if (x)
10146 if (new_vard != new_var)
10147 val = build_fold_addr_expr_loc (clause_loc, val);
10148 SET_DECL_VALUE_EXPR (new_vard, val);
10150 SET_DECL_VALUE_EXPR (placeholder, ref);
10151 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10152 lower_omp (&tseq, octx);
10153 if (x)
10154 SET_DECL_VALUE_EXPR (new_vard, x);
10155 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10156 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10157 gimple_seq_add_seq (&before, tseq);
10158 if (is_simd)
10159 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10162 else if (is_simd)
10164 tree x;
10165 if (octx->scan_exclusive)
10167 tree v4 = unshare_expr (var4);
10168 tree v2 = unshare_expr (var2);
10169 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
10170 gimplify_and_add (x, &before);
10172 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10173 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10174 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10175 tree vexpr = val;
10176 if (x && new_vard != new_var)
10177 vexpr = build_fold_addr_expr_loc (clause_loc, val);
10178 if (x)
10179 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10180 SET_DECL_VALUE_EXPR (placeholder, var2);
10181 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10182 lower_omp (&tseq, octx);
10183 gimple_seq_add_seq (&before, tseq);
10184 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10185 if (x)
10186 SET_DECL_VALUE_EXPR (new_vard, x);
10187 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10188 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10189 if (octx->scan_inclusive)
10191 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10192 var2);
10193 gimplify_and_add (x, &before);
10195 else if (lane0 == NULL_TREE)
10197 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10198 var4);
10199 gimplify_and_add (x, &before);
10203 else
10205 if (input_phase)
10207 /* input phase. Set val to initializer before
10208 the body. */
10209 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
10210 gimplify_assign (val, x, &before);
10212 else if (is_simd)
10214 /* scan phase. */
10215 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10216 if (code == MINUS_EXPR)
10217 code = PLUS_EXPR;
10219 tree x = build2 (code, TREE_TYPE (var2),
10220 unshare_expr (var2), unshare_expr (val));
10221 if (octx->scan_inclusive)
10223 gimplify_assign (unshare_expr (var2), x, &before);
10224 gimplify_assign (val, var2, &before);
10226 else
10228 gimplify_assign (unshare_expr (var4),
10229 unshare_expr (var2), &before);
10230 gimplify_assign (var2, x, &before);
10231 if (lane0 == NULL_TREE)
10232 gimplify_assign (val, var4, &before);
10236 if (octx->scan_exclusive && !input_phase && lane0)
10238 tree vexpr = unshare_expr (var4);
10239 TREE_OPERAND (vexpr, 1) = lane0;
10240 if (new_vard != new_var)
10241 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
10242 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10246 if (is_simd && !is_for_simd)
10248 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
10249 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
10250 gsi_replace (gsi_p, gimple_build_nop (), true);
10251 return;
10253 lower_omp (gimple_omp_body_ptr (stmt), octx);
10254 if (before)
10256 gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (stmt));
10257 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
10262 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10263 substitution of a couple of function calls. But in the NAMED case,
10264 requires that languages coordinate a symbol name. It is therefore
10265 best put here in common code. */
10267 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
10269 static void
10270 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10272 tree block;
10273 tree name, lock, unlock;
10274 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10275 gbind *bind;
10276 location_t loc = gimple_location (stmt);
10277 gimple_seq tbody;
10279 name = gimple_omp_critical_name (stmt);
10280 if (name)
10282 tree decl;
10284 if (!critical_name_mutexes)
10285 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10287 tree *n = critical_name_mutexes->get (name);
10288 if (n == NULL)
10290 char *new_str;
10292 decl = create_tmp_var_raw (ptr_type_node);
10294 new_str = ACONCAT ((".gomp_critical_user_",
10295 IDENTIFIER_POINTER (name), NULL));
10296 DECL_NAME (decl) = get_identifier (new_str);
10297 TREE_PUBLIC (decl) = 1;
10298 TREE_STATIC (decl) = 1;
10299 DECL_COMMON (decl) = 1;
10300 DECL_ARTIFICIAL (decl) = 1;
10301 DECL_IGNORED_P (decl) = 1;
10303 varpool_node::finalize_decl (decl);
10305 critical_name_mutexes->put (name, decl);
10307 else
10308 decl = *n;
10310 /* If '#pragma omp critical' is inside offloaded region or
10311 inside function marked as offloadable, the symbol must be
10312 marked as offloadable too. */
10313 omp_context *octx;
10314 if (cgraph_node::get (current_function_decl)->offloadable)
10315 varpool_node::get_create (decl)->offloadable = 1;
10316 else
10317 for (octx = ctx->outer; octx; octx = octx->outer)
10318 if (is_gimple_omp_offloaded (octx->stmt))
10320 varpool_node::get_create (decl)->offloadable = 1;
10321 break;
10324 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10325 lock = build_call_expr_loc (loc, lock, 1,
10326 build_fold_addr_expr_loc (loc, decl));
10328 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10329 unlock = build_call_expr_loc (loc, unlock, 1,
10330 build_fold_addr_expr_loc (loc, decl));
10332 else
10334 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10335 lock = build_call_expr_loc (loc, lock, 0);
10337 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10338 unlock = build_call_expr_loc (loc, unlock, 0);
10341 push_gimplify_context ();
10343 block = make_node (BLOCK);
10344 bind = gimple_build_bind (NULL, NULL, block);
10345 gsi_replace (gsi_p, bind, true);
10346 gimple_bind_add_stmt (bind, stmt);
10348 tbody = gimple_bind_body (bind);
10349 gimplify_and_add (lock, &tbody);
10350 gimple_bind_set_body (bind, tbody);
10352 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10353 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10354 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10355 gimple_omp_set_body (stmt, NULL);
10357 tbody = gimple_bind_body (bind);
10358 gimplify_and_add (unlock, &tbody);
10359 gimple_bind_set_body (bind, tbody);
10361 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10363 pop_gimplify_context (bind);
10364 gimple_bind_append_vars (bind, ctx->block_vars);
10365 BLOCK_VARS (block) = gimple_bind_vars (bind);
10368 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10369 for a lastprivate clause. Given a loop control predicate of (V
10370 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10371 is appended to *DLIST, iterator initialization is appended to
10372 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10373 to be emitted in a critical section. */
10375 static void
10376 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10377 gimple_seq *dlist, gimple_seq *clist,
10378 struct omp_context *ctx)
10380 tree clauses, cond, vinit;
10381 enum tree_code cond_code;
10382 gimple_seq stmts;
10384 cond_code = fd->loop.cond_code;
10385 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10387 /* When possible, use a strict equality expression. This can let VRP
10388 type optimizations deduce the value and remove a copy. */
10389 if (tree_fits_shwi_p (fd->loop.step))
10391 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10392 if (step == 1 || step == -1)
10393 cond_code = EQ_EXPR;
10396 tree n2 = fd->loop.n2;
10397 if (fd->collapse > 1
10398 && TREE_CODE (n2) != INTEGER_CST
10399 && gimple_omp_for_combined_into_p (fd->for_stmt))
10401 struct omp_context *taskreg_ctx = NULL;
10402 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10404 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10405 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10406 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10408 if (gimple_omp_for_combined_into_p (gfor))
10410 gcc_assert (ctx->outer->outer
10411 && is_parallel_ctx (ctx->outer->outer));
10412 taskreg_ctx = ctx->outer->outer;
10414 else
10416 struct omp_for_data outer_fd;
10417 omp_extract_for_data (gfor, &outer_fd, NULL);
10418 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10421 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10422 taskreg_ctx = ctx->outer->outer;
10424 else if (is_taskreg_ctx (ctx->outer))
10425 taskreg_ctx = ctx->outer;
10426 if (taskreg_ctx)
10428 int i;
10429 tree taskreg_clauses
10430 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10431 tree innerc = omp_find_clause (taskreg_clauses,
10432 OMP_CLAUSE__LOOPTEMP_);
10433 gcc_assert (innerc);
10434 int count = fd->collapse;
10435 if (fd->non_rect
10436 && fd->last_nonrect == fd->first_nonrect + 1)
10437 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10438 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10439 count += 4;
10440 for (i = 0; i < count; i++)
10442 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10443 OMP_CLAUSE__LOOPTEMP_);
10444 gcc_assert (innerc);
10446 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10447 OMP_CLAUSE__LOOPTEMP_);
10448 if (innerc)
10449 n2 = fold_convert (TREE_TYPE (n2),
10450 lookup_decl (OMP_CLAUSE_DECL (innerc),
10451 taskreg_ctx));
10454 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10456 clauses = gimple_omp_for_clauses (fd->for_stmt);
10457 stmts = NULL;
10458 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
10459 if (!gimple_seq_empty_p (stmts))
10461 gimple_seq_add_seq (&stmts, *dlist);
10462 *dlist = stmts;
10464 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10465 vinit = fd->loop.n1;
10466 if (cond_code == EQ_EXPR
10467 && tree_fits_shwi_p (fd->loop.n2)
10468 && ! integer_zerop (fd->loop.n2))
10469 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10470 else
10471 vinit = unshare_expr (vinit);
10473 /* Initialize the iterator variable, so that threads that don't execute
10474 any iterations don't execute the lastprivate clauses by accident. */
10475 gimplify_assign (fd->loop.v, vinit, body_p);
10479 /* OpenACC privatization.
10481 Or, in other words, *sharing* at the respective OpenACC level of
10482 parallelism.
10484 From a correctness perspective, a non-addressable variable can't be accessed
10485 outside the current thread, so it can go in a (faster than shared memory)
10486 register -- though that register may need to be broadcast in some
10487 circumstances. A variable can only meaningfully be "shared" across workers
10488 or vector lanes if its address is taken, e.g. by a call to an atomic
10489 builtin.
10491 From an optimisation perspective, the answer might be fuzzier: maybe
10492 sometimes, using shared memory directly would be faster than
10493 broadcasting. */
10495 static void
10496 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10497 const location_t loc, const tree c,
10498 const tree decl)
10500 const dump_user_location_t d_u_loc
10501 = dump_user_location_t::from_location_t (loc);
10502 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10503 #if __GNUC__ >= 10
10504 # pragma GCC diagnostic push
10505 # pragma GCC diagnostic ignored "-Wformat"
10506 #endif
10507 dump_printf_loc (l_dump_flags, d_u_loc,
10508 "variable %<%T%> ", decl);
10509 #if __GNUC__ >= 10
10510 # pragma GCC diagnostic pop
10511 #endif
10512 if (c)
10513 dump_printf (l_dump_flags,
10514 "in %qs clause ",
10515 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10516 else
10517 dump_printf (l_dump_flags,
10518 "declared in block ");
10521 static bool
10522 oacc_privatization_candidate_p (const location_t loc, const tree c,
10523 const tree decl)
10525 dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10527 /* There is some differentiation depending on block vs. clause. */
10528 bool block = !c;
10530 bool res = true;
10532 if (res && !VAR_P (decl))
10534 /* A PARM_DECL (appearing in a 'private' clause) is expected to have been
10535 privatized into a new VAR_DECL. */
10536 gcc_checking_assert (TREE_CODE (decl) != PARM_DECL);
10538 res = false;
10540 if (dump_enabled_p ())
10542 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10543 dump_printf (l_dump_flags,
10544 "potentially has improper OpenACC privatization level: %qs\n",
10545 get_tree_code_name (TREE_CODE (decl)));
10549 if (res && block && TREE_STATIC (decl))
10551 res = false;
10553 if (dump_enabled_p ())
10555 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10556 dump_printf (l_dump_flags,
10557 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10558 "static");
10562 if (res && block && DECL_EXTERNAL (decl))
10564 res = false;
10566 if (dump_enabled_p ())
10568 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10569 dump_printf (l_dump_flags,
10570 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10571 "external");
10575 if (res && !TREE_ADDRESSABLE (decl))
10577 res = false;
10579 if (dump_enabled_p ())
10581 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10582 dump_printf (l_dump_flags,
10583 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10584 "not addressable");
10588 /* If an artificial variable has been added to a bind, e.g.
10589 a compiler-generated temporary structure used by the Fortran front-end, do
10590 not consider it as a privatization candidate. Note that variables on
10591 the stack are private per-thread by default: making them "gang-private"
10592 for OpenACC actually means to share a single instance of a variable
10593 amongst all workers and threads spawned within each gang.
10594 At present, no compiler-generated artificial variables require such
10595 sharing semantics, so this is safe. */
10597 if (res && block && DECL_ARTIFICIAL (decl))
10599 res = false;
10601 if (dump_enabled_p ())
10603 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10604 dump_printf (l_dump_flags,
10605 "isn%'t candidate for adjusting OpenACC privatization "
10606 "level: %s\n", "artificial");
10610 if (res)
10612 if (dump_enabled_p ())
10614 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10615 dump_printf (l_dump_flags,
10616 "is candidate for adjusting OpenACC privatization level\n");
10620 if (dump_file && (dump_flags & TDF_DETAILS))
10622 print_generic_decl (dump_file, decl, dump_flags);
10623 fprintf (dump_file, "\n");
10626 return res;
10629 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10630 CTX. */
10632 static void
10633 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10635 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10636 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10638 tree decl = OMP_CLAUSE_DECL (c);
10640 tree new_decl = lookup_decl (decl, ctx);
10642 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c,
10643 new_decl))
10644 continue;
10646 gcc_checking_assert
10647 (!ctx->oacc_privatization_candidates.contains (new_decl));
10648 ctx->oacc_privatization_candidates.safe_push (new_decl);
10652 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10653 CTX. */
10655 static void
10656 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10658 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10660 tree new_decl = lookup_decl (decl, ctx);
10661 gcc_checking_assert (new_decl == decl);
10663 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL,
10664 new_decl))
10665 continue;
10667 gcc_checking_assert
10668 (!ctx->oacc_privatization_candidates.contains (new_decl));
10669 ctx->oacc_privatization_candidates.safe_push (new_decl);
10673 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10675 static tree
10676 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10677 struct walk_stmt_info *wi)
10679 gimple *stmt = gsi_stmt (*gsi_p);
10681 *handled_ops_p = true;
10682 switch (gimple_code (stmt))
10684 WALK_SUBSTMTS;
10686 case GIMPLE_OMP_FOR:
10687 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10688 && gimple_omp_for_combined_into_p (stmt))
10689 *handled_ops_p = false;
10690 break;
10692 case GIMPLE_OMP_SCAN:
10693 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10694 return integer_zero_node;
10695 default:
10696 break;
10698 return NULL;
10701 /* Helper function for lower_omp_for, add transformations for a worksharing
10702 loop with scan directives inside of it.
10703 For worksharing loop not combined with simd, transform:
10704 #pragma omp for reduction(inscan,+:r) private(i)
10705 for (i = 0; i < n; i = i + 1)
10708 update (r);
10710 #pragma omp scan inclusive(r)
10712 use (r);
10716 into two worksharing loops + code to merge results:
10718 num_threads = omp_get_num_threads ();
10719 thread_num = omp_get_thread_num ();
10720 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10721 <D.2099>:
10722 var2 = r;
10723 goto <D.2101>;
10724 <D.2100>:
10725 // For UDRs this is UDR init, or if ctors are needed, copy from
10726 // var3 that has been constructed to contain the neutral element.
10727 var2 = 0;
10728 <D.2101>:
10729 ivar = 0;
10730 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10731 // a shared array with num_threads elements and rprivb to a local array
10732 // number of elements equal to the number of (contiguous) iterations the
10733 // current thread will perform. controlb and controlp variables are
10734 // temporaries to handle deallocation of rprivb at the end of second
10735 // GOMP_FOR.
10736 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10737 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10738 for (i = 0; i < n; i = i + 1)
10741 // For UDRs this is UDR init or copy from var3.
10742 r = 0;
10743 // This is the input phase from user code.
10744 update (r);
10747 // For UDRs this is UDR merge.
10748 var2 = var2 + r;
10749 // Rather than handing it over to the user, save to local thread's
10750 // array.
10751 rprivb[ivar] = var2;
10752 // For exclusive scan, the above two statements are swapped.
10753 ivar = ivar + 1;
10756 // And remember the final value from this thread's into the shared
10757 // rpriva array.
10758 rpriva[(sizetype) thread_num] = var2;
10759 // If more than one thread, compute using Work-Efficient prefix sum
10760 // the inclusive parallel scan of the rpriva array.
10761 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10762 <D.2102>:
10763 GOMP_barrier ();
10764 down = 0;
10765 k = 1;
10766 num_threadsu = (unsigned int) num_threads;
10767 thread_numup1 = (unsigned int) thread_num + 1;
10768 <D.2108>:
10769 twok = k << 1;
10770 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10771 <D.2110>:
10772 down = 4294967295;
10773 k = k >> 1;
10774 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10775 <D.2112>:
10776 k = k >> 1;
10777 <D.2111>:
10778 twok = k << 1;
10779 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10780 mul = REALPART_EXPR <cplx>;
10781 ovf = IMAGPART_EXPR <cplx>;
10782 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10783 <D.2116>:
10784 andv = k & down;
10785 andvm1 = andv + 4294967295;
10786 l = mul + andvm1;
10787 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10788 <D.2120>:
10789 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10790 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10791 rpriva[l] = rpriva[l - k] + rpriva[l];
10792 <D.2117>:
10793 if (down == 0) goto <D.2121>; else goto <D.2122>;
10794 <D.2121>:
10795 k = k << 1;
10796 goto <D.2123>;
10797 <D.2122>:
10798 k = k >> 1;
10799 <D.2123>:
10800 GOMP_barrier ();
10801 if (k != 0) goto <D.2108>; else goto <D.2103>;
10802 <D.2103>:
10803 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10804 <D.2124>:
10805 // For UDRs this is UDR init or copy from var3.
10806 var2 = 0;
10807 goto <D.2126>;
10808 <D.2125>:
10809 var2 = rpriva[thread_num - 1];
10810 <D.2126>:
10811 ivar = 0;
10812 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10813 reduction(inscan,+:r) private(i)
10814 for (i = 0; i < n; i = i + 1)
10817 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10818 r = var2 + rprivb[ivar];
10821 // This is the scan phase from user code.
10822 use (r);
10823 // Plus a bump of the iterator.
10824 ivar = ivar + 1;
10826 } */
10828 static void
10829 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10830 struct omp_for_data *fd, omp_context *ctx)
10832 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10833 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10835 gimple_seq body = gimple_omp_body (stmt);
10836 gimple_stmt_iterator input1_gsi = gsi_none ();
10837 struct walk_stmt_info wi;
10838 memset (&wi, 0, sizeof (wi));
10839 wi.val_only = true;
10840 wi.info = (void *) &input1_gsi;
10841 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10842 gcc_assert (!gsi_end_p (input1_gsi));
10844 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10845 gimple_stmt_iterator gsi = input1_gsi;
10846 gsi_next (&gsi);
10847 gimple_stmt_iterator scan1_gsi = gsi;
10848 gimple *scan_stmt1 = gsi_stmt (gsi);
10849 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10851 gimple_seq input_body = gimple_omp_body (input_stmt1);
10852 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10853 gimple_omp_set_body (input_stmt1, NULL);
10854 gimple_omp_set_body (scan_stmt1, NULL);
10855 gimple_omp_set_body (stmt, NULL);
10857 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10858 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10859 gimple_omp_set_body (stmt, body);
10860 gimple_omp_set_body (input_stmt1, input_body);
10862 gimple_stmt_iterator input2_gsi = gsi_none ();
10863 memset (&wi, 0, sizeof (wi));
10864 wi.val_only = true;
10865 wi.info = (void *) &input2_gsi;
10866 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10867 gcc_assert (!gsi_end_p (input2_gsi));
10869 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10870 gsi = input2_gsi;
10871 gsi_next (&gsi);
10872 gimple_stmt_iterator scan2_gsi = gsi;
10873 gimple *scan_stmt2 = gsi_stmt (gsi);
10874 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10875 gimple_omp_set_body (scan_stmt2, scan_body);
10877 gimple_stmt_iterator input3_gsi = gsi_none ();
10878 gimple_stmt_iterator scan3_gsi = gsi_none ();
10879 gimple_stmt_iterator input4_gsi = gsi_none ();
10880 gimple_stmt_iterator scan4_gsi = gsi_none ();
10881 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10882 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10883 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10884 if (is_for_simd)
10886 memset (&wi, 0, sizeof (wi));
10887 wi.val_only = true;
10888 wi.info = (void *) &input3_gsi;
10889 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10890 gcc_assert (!gsi_end_p (input3_gsi));
10892 input_stmt3 = gsi_stmt (input3_gsi);
10893 gsi = input3_gsi;
10894 gsi_next (&gsi);
10895 scan3_gsi = gsi;
10896 scan_stmt3 = gsi_stmt (gsi);
10897 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10899 memset (&wi, 0, sizeof (wi));
10900 wi.val_only = true;
10901 wi.info = (void *) &input4_gsi;
10902 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10903 gcc_assert (!gsi_end_p (input4_gsi));
10905 input_stmt4 = gsi_stmt (input4_gsi);
10906 gsi = input4_gsi;
10907 gsi_next (&gsi);
10908 scan4_gsi = gsi;
10909 scan_stmt4 = gsi_stmt (gsi);
10910 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10912 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10913 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
10916 tree num_threads = create_tmp_var (integer_type_node);
10917 tree thread_num = create_tmp_var (integer_type_node);
10918 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10919 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10920 gimple *g = gimple_build_call (nthreads_decl, 0);
10921 gimple_call_set_lhs (g, num_threads);
10922 gimple_seq_add_stmt (body_p, g);
10923 g = gimple_build_call (threadnum_decl, 0);
10924 gimple_call_set_lhs (g, thread_num);
10925 gimple_seq_add_stmt (body_p, g);
10927 tree ivar = create_tmp_var (sizetype);
10928 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
10929 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
10930 tree k = create_tmp_var (unsigned_type_node);
10931 tree l = create_tmp_var (unsigned_type_node);
10933 gimple_seq clist = NULL, mdlist = NULL;
10934 gimple_seq thr01_list = NULL, thrn1_list = NULL;
10935 gimple_seq thr02_list = NULL, thrn2_list = NULL;
10936 gimple_seq scan1_list = NULL, input2_list = NULL;
10937 gimple_seq last_list = NULL, reduc_list = NULL;
10938 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10939 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10940 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10942 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10943 tree var = OMP_CLAUSE_DECL (c);
10944 tree new_var = lookup_decl (var, ctx);
10945 tree var3 = NULL_TREE;
10946 tree new_vard = new_var;
10947 if (omp_privatize_by_reference (var))
10948 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10949 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10951 var3 = maybe_lookup_decl (new_vard, ctx);
10952 if (var3 == new_vard)
10953 var3 = NULL_TREE;
10956 tree ptype = build_pointer_type (TREE_TYPE (new_var));
10957 tree rpriva = create_tmp_var (ptype);
10958 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10959 OMP_CLAUSE_DECL (nc) = rpriva;
10960 *cp1 = nc;
10961 cp1 = &OMP_CLAUSE_CHAIN (nc);
10963 tree rprivb = create_tmp_var (ptype);
10964 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10965 OMP_CLAUSE_DECL (nc) = rprivb;
10966 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
10967 *cp1 = nc;
10968 cp1 = &OMP_CLAUSE_CHAIN (nc);
10970 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
10971 if (new_vard != new_var)
10972 TREE_ADDRESSABLE (var2) = 1;
10973 gimple_add_tmp_var (var2);
10975 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
10976 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10977 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10978 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10979 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
10981 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
10982 thread_num, integer_minus_one_node);
10983 x = fold_convert_loc (clause_loc, sizetype, x);
10984 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10985 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10986 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10987 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
10989 x = fold_convert_loc (clause_loc, sizetype, l);
10990 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10991 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10992 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10993 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
10995 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
10996 x = fold_convert_loc (clause_loc, sizetype, x);
10997 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10998 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10999 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11000 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
11002 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
11003 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11004 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
11005 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
11007 tree var4 = is_for_simd ? new_var : var2;
11008 tree var5 = NULL_TREE, var6 = NULL_TREE;
11009 if (is_for_simd)
11011 var5 = lookup_decl (var, input_simd_ctx);
11012 var6 = lookup_decl (var, scan_simd_ctx);
11013 if (new_vard != new_var)
11015 var5 = build_simple_mem_ref_loc (clause_loc, var5);
11016 var6 = build_simple_mem_ref_loc (clause_loc, var6);
11019 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11021 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
11022 tree val = var2;
11024 x = lang_hooks.decls.omp_clause_default_ctor
11025 (c, var2, build_outer_var_ref (var, ctx));
11026 if (x)
11027 gimplify_and_add (x, &clist);
11029 x = build_outer_var_ref (var, ctx);
11030 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
11032 gimplify_and_add (x, &thr01_list);
11034 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
11035 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
11036 if (var3)
11038 x = unshare_expr (var4);
11039 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11040 gimplify_and_add (x, &thrn1_list);
11041 x = unshare_expr (var4);
11042 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11043 gimplify_and_add (x, &thr02_list);
11045 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
11047 /* Otherwise, assign to it the identity element. */
11048 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11049 tseq = copy_gimple_seq_and_replace_locals (tseq);
11050 if (!is_for_simd)
11052 if (new_vard != new_var)
11053 val = build_fold_addr_expr_loc (clause_loc, val);
11054 SET_DECL_VALUE_EXPR (new_vard, val);
11055 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11057 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
11058 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11059 lower_omp (&tseq, ctx);
11060 gimple_seq_add_seq (&thrn1_list, tseq);
11061 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11062 lower_omp (&tseq, ctx);
11063 gimple_seq_add_seq (&thr02_list, tseq);
11064 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11065 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11066 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11067 if (y)
11068 SET_DECL_VALUE_EXPR (new_vard, y);
11069 else
11071 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11072 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11076 x = unshare_expr (var4);
11077 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
11078 gimplify_and_add (x, &thrn2_list);
11080 if (is_for_simd)
11082 x = unshare_expr (rprivb_ref);
11083 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
11084 gimplify_and_add (x, &scan1_list);
11086 else
11088 if (ctx->scan_exclusive)
11090 x = unshare_expr (rprivb_ref);
11091 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11092 gimplify_and_add (x, &scan1_list);
11095 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11096 tseq = copy_gimple_seq_and_replace_locals (tseq);
11097 SET_DECL_VALUE_EXPR (placeholder, var2);
11098 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11099 lower_omp (&tseq, ctx);
11100 gimple_seq_add_seq (&scan1_list, tseq);
11102 if (ctx->scan_inclusive)
11104 x = unshare_expr (rprivb_ref);
11105 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11106 gimplify_and_add (x, &scan1_list);
11110 x = unshare_expr (rpriva_ref);
11111 x = lang_hooks.decls.omp_clause_assign_op (c, x,
11112 unshare_expr (var4));
11113 gimplify_and_add (x, &mdlist);
11115 x = unshare_expr (is_for_simd ? var6 : new_var);
11116 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
11117 gimplify_and_add (x, &input2_list);
11119 val = rprivb_ref;
11120 if (new_vard != new_var)
11121 val = build_fold_addr_expr_loc (clause_loc, val);
11123 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11124 tseq = copy_gimple_seq_and_replace_locals (tseq);
11125 SET_DECL_VALUE_EXPR (new_vard, val);
11126 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11127 if (is_for_simd)
11129 SET_DECL_VALUE_EXPR (placeholder, var6);
11130 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11132 else
11133 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11134 lower_omp (&tseq, ctx);
11135 if (y)
11136 SET_DECL_VALUE_EXPR (new_vard, y);
11137 else
11139 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11140 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11142 if (!is_for_simd)
11144 SET_DECL_VALUE_EXPR (placeholder, new_var);
11145 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11146 lower_omp (&tseq, ctx);
11148 gimple_seq_add_seq (&input2_list, tseq);
11150 x = build_outer_var_ref (var, ctx);
11151 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
11152 gimplify_and_add (x, &last_list);
11154 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
11155 gimplify_and_add (x, &reduc_list);
11156 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11157 tseq = copy_gimple_seq_and_replace_locals (tseq);
11158 val = rprival_ref;
11159 if (new_vard != new_var)
11160 val = build_fold_addr_expr_loc (clause_loc, val);
11161 SET_DECL_VALUE_EXPR (new_vard, val);
11162 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11163 SET_DECL_VALUE_EXPR (placeholder, var2);
11164 lower_omp (&tseq, ctx);
11165 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11166 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11167 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11168 if (y)
11169 SET_DECL_VALUE_EXPR (new_vard, y);
11170 else
11172 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11173 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11175 gimple_seq_add_seq (&reduc_list, tseq);
11176 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
11177 gimplify_and_add (x, &reduc_list);
11179 x = lang_hooks.decls.omp_clause_dtor (c, var2);
11180 if (x)
11181 gimplify_and_add (x, dlist);
11183 else
11185 x = build_outer_var_ref (var, ctx);
11186 gimplify_assign (unshare_expr (var4), x, &thr01_list);
11188 x = omp_reduction_init (c, TREE_TYPE (new_var));
11189 gimplify_assign (unshare_expr (var4), unshare_expr (x),
11190 &thrn1_list);
11191 gimplify_assign (unshare_expr (var4), x, &thr02_list);
11193 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
11195 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
11196 if (code == MINUS_EXPR)
11197 code = PLUS_EXPR;
11199 if (is_for_simd)
11200 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
11201 else
11203 if (ctx->scan_exclusive)
11204 gimplify_assign (unshare_expr (rprivb_ref), var2,
11205 &scan1_list);
11206 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
11207 gimplify_assign (var2, x, &scan1_list);
11208 if (ctx->scan_inclusive)
11209 gimplify_assign (unshare_expr (rprivb_ref), var2,
11210 &scan1_list);
11213 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
11214 &mdlist);
11216 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
11217 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
11219 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
11220 &last_list);
11222 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
11223 unshare_expr (rprival_ref));
11224 gimplify_assign (rprival_ref, x, &reduc_list);
11228 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11229 gimple_seq_add_stmt (&scan1_list, g);
11230 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11231 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11232 ? scan_stmt4 : scan_stmt2), g);
11234 tree controlb = create_tmp_var (boolean_type_node);
11235 tree controlp = create_tmp_var (ptr_type_node);
11236 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11237 OMP_CLAUSE_DECL (nc) = controlb;
11238 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11239 *cp1 = nc;
11240 cp1 = &OMP_CLAUSE_CHAIN (nc);
11241 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11242 OMP_CLAUSE_DECL (nc) = controlp;
11243 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11244 *cp1 = nc;
11245 cp1 = &OMP_CLAUSE_CHAIN (nc);
11246 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11247 OMP_CLAUSE_DECL (nc) = controlb;
11248 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11249 *cp2 = nc;
11250 cp2 = &OMP_CLAUSE_CHAIN (nc);
11251 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11252 OMP_CLAUSE_DECL (nc) = controlp;
11253 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11254 *cp2 = nc;
11255 cp2 = &OMP_CLAUSE_CHAIN (nc);
11257 *cp1 = gimple_omp_for_clauses (stmt);
11258 gimple_omp_for_set_clauses (stmt, new_clauses1);
11259 *cp2 = gimple_omp_for_clauses (new_stmt);
11260 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
11262 if (is_for_simd)
11264 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
11265 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
11267 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
11268 GSI_SAME_STMT);
11269 gsi_remove (&input3_gsi, true);
11270 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11271 GSI_SAME_STMT);
11272 gsi_remove (&scan3_gsi, true);
11273 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11274 GSI_SAME_STMT);
11275 gsi_remove (&input4_gsi, true);
11276 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11277 GSI_SAME_STMT);
11278 gsi_remove (&scan4_gsi, true);
11280 else
11282 gimple_omp_set_body (scan_stmt1, scan1_list);
11283 gimple_omp_set_body (input_stmt2, input2_list);
11286 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11287 GSI_SAME_STMT);
11288 gsi_remove (&input1_gsi, true);
11289 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11290 GSI_SAME_STMT);
11291 gsi_remove (&scan1_gsi, true);
11292 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11293 GSI_SAME_STMT);
11294 gsi_remove (&input2_gsi, true);
11295 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11296 GSI_SAME_STMT);
11297 gsi_remove (&scan2_gsi, true);
11299 gimple_seq_add_seq (body_p, clist);
11301 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11302 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11303 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11304 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11305 gimple_seq_add_stmt (body_p, g);
11306 g = gimple_build_label (lab1);
11307 gimple_seq_add_stmt (body_p, g);
11308 gimple_seq_add_seq (body_p, thr01_list);
11309 g = gimple_build_goto (lab3);
11310 gimple_seq_add_stmt (body_p, g);
11311 g = gimple_build_label (lab2);
11312 gimple_seq_add_stmt (body_p, g);
11313 gimple_seq_add_seq (body_p, thrn1_list);
11314 g = gimple_build_label (lab3);
11315 gimple_seq_add_stmt (body_p, g);
11317 g = gimple_build_assign (ivar, size_zero_node);
11318 gimple_seq_add_stmt (body_p, g);
11320 gimple_seq_add_stmt (body_p, stmt);
11321 gimple_seq_add_seq (body_p, body);
11322 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11323 fd->loop.v));
11325 g = gimple_build_omp_return (true);
11326 gimple_seq_add_stmt (body_p, g);
11327 gimple_seq_add_seq (body_p, mdlist);
11329 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11330 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11331 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11332 gimple_seq_add_stmt (body_p, g);
11333 g = gimple_build_label (lab1);
11334 gimple_seq_add_stmt (body_p, g);
11336 g = omp_build_barrier (NULL);
11337 gimple_seq_add_stmt (body_p, g);
11339 tree down = create_tmp_var (unsigned_type_node);
11340 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11341 gimple_seq_add_stmt (body_p, g);
11343 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11344 gimple_seq_add_stmt (body_p, g);
11346 tree num_threadsu = create_tmp_var (unsigned_type_node);
11347 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11348 gimple_seq_add_stmt (body_p, g);
11350 tree thread_numu = create_tmp_var (unsigned_type_node);
11351 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11352 gimple_seq_add_stmt (body_p, g);
11354 tree thread_nump1 = create_tmp_var (unsigned_type_node);
11355 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11356 build_int_cst (unsigned_type_node, 1));
11357 gimple_seq_add_stmt (body_p, g);
11359 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11360 g = gimple_build_label (lab3);
11361 gimple_seq_add_stmt (body_p, g);
11363 tree twok = create_tmp_var (unsigned_type_node);
11364 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11365 gimple_seq_add_stmt (body_p, g);
11367 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11368 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11369 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11370 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11371 gimple_seq_add_stmt (body_p, g);
11372 g = gimple_build_label (lab4);
11373 gimple_seq_add_stmt (body_p, g);
11374 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11375 gimple_seq_add_stmt (body_p, g);
11376 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11377 gimple_seq_add_stmt (body_p, g);
11379 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11380 gimple_seq_add_stmt (body_p, g);
11381 g = gimple_build_label (lab6);
11382 gimple_seq_add_stmt (body_p, g);
11384 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11385 gimple_seq_add_stmt (body_p, g);
11387 g = gimple_build_label (lab5);
11388 gimple_seq_add_stmt (body_p, g);
11390 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11391 gimple_seq_add_stmt (body_p, g);
11393 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11394 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11395 gimple_call_set_lhs (g, cplx);
11396 gimple_seq_add_stmt (body_p, g);
11397 tree mul = create_tmp_var (unsigned_type_node);
11398 g = gimple_build_assign (mul, REALPART_EXPR,
11399 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11400 gimple_seq_add_stmt (body_p, g);
11401 tree ovf = create_tmp_var (unsigned_type_node);
11402 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11403 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11404 gimple_seq_add_stmt (body_p, g);
11406 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11407 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11408 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11409 lab7, lab8);
11410 gimple_seq_add_stmt (body_p, g);
11411 g = gimple_build_label (lab7);
11412 gimple_seq_add_stmt (body_p, g);
11414 tree andv = create_tmp_var (unsigned_type_node);
11415 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11416 gimple_seq_add_stmt (body_p, g);
11417 tree andvm1 = create_tmp_var (unsigned_type_node);
11418 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11419 build_minus_one_cst (unsigned_type_node));
11420 gimple_seq_add_stmt (body_p, g);
11422 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11423 gimple_seq_add_stmt (body_p, g);
11425 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11426 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11427 gimple_seq_add_stmt (body_p, g);
11428 g = gimple_build_label (lab9);
11429 gimple_seq_add_stmt (body_p, g);
11430 gimple_seq_add_seq (body_p, reduc_list);
11431 g = gimple_build_label (lab8);
11432 gimple_seq_add_stmt (body_p, g);
11434 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11435 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11436 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11437 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11438 lab10, lab11);
11439 gimple_seq_add_stmt (body_p, g);
11440 g = gimple_build_label (lab10);
11441 gimple_seq_add_stmt (body_p, g);
11442 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11443 gimple_seq_add_stmt (body_p, g);
11444 g = gimple_build_goto (lab12);
11445 gimple_seq_add_stmt (body_p, g);
11446 g = gimple_build_label (lab11);
11447 gimple_seq_add_stmt (body_p, g);
11448 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11449 gimple_seq_add_stmt (body_p, g);
11450 g = gimple_build_label (lab12);
11451 gimple_seq_add_stmt (body_p, g);
11453 g = omp_build_barrier (NULL);
11454 gimple_seq_add_stmt (body_p, g);
11456 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11457 lab3, lab2);
11458 gimple_seq_add_stmt (body_p, g);
11460 g = gimple_build_label (lab2);
11461 gimple_seq_add_stmt (body_p, g);
11463 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11464 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11465 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11466 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11467 gimple_seq_add_stmt (body_p, g);
11468 g = gimple_build_label (lab1);
11469 gimple_seq_add_stmt (body_p, g);
11470 gimple_seq_add_seq (body_p, thr02_list);
11471 g = gimple_build_goto (lab3);
11472 gimple_seq_add_stmt (body_p, g);
11473 g = gimple_build_label (lab2);
11474 gimple_seq_add_stmt (body_p, g);
11475 gimple_seq_add_seq (body_p, thrn2_list);
11476 g = gimple_build_label (lab3);
11477 gimple_seq_add_stmt (body_p, g);
11479 g = gimple_build_assign (ivar, size_zero_node);
11480 gimple_seq_add_stmt (body_p, g);
11481 gimple_seq_add_stmt (body_p, new_stmt);
11482 gimple_seq_add_seq (body_p, new_body);
11484 gimple_seq new_dlist = NULL;
11485 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11486 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11487 tree num_threadsm1 = create_tmp_var (integer_type_node);
11488 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11489 integer_minus_one_node);
11490 gimple_seq_add_stmt (&new_dlist, g);
11491 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11492 gimple_seq_add_stmt (&new_dlist, g);
11493 g = gimple_build_label (lab1);
11494 gimple_seq_add_stmt (&new_dlist, g);
11495 gimple_seq_add_seq (&new_dlist, last_list);
11496 g = gimple_build_label (lab2);
11497 gimple_seq_add_stmt (&new_dlist, g);
11498 gimple_seq_add_seq (&new_dlist, *dlist);
11499 *dlist = new_dlist;
11502 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11503 the addresses of variables to be made private at the surrounding
11504 parallelism level. Such functions appear in the gimple code stream in two
11505 forms, e.g. for a partitioned loop:
11507 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11508 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11509 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11510 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11512 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11513 not as part of a HEAD_MARK sequence:
11515 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11517 For such stand-alone appearances, the 3rd argument is always 0, denoting
11518 gang partitioning. */
11520 static gcall *
11521 lower_oacc_private_marker (omp_context *ctx)
11523 if (ctx->oacc_privatization_candidates.length () == 0)
11524 return NULL;
11526 auto_vec<tree, 5> args;
11528 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11529 args.quick_push (integer_zero_node);
11530 args.quick_push (integer_minus_one_node);
11532 int i;
11533 tree decl;
11534 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11536 gcc_checking_assert (TREE_ADDRESSABLE (decl));
11537 tree addr = build_fold_addr_expr (decl);
11538 args.safe_push (addr);
11541 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
11544 /* Lower code for an OMP loop directive. */
11546 static void
11547 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11549 tree *rhs_p, block;
11550 struct omp_for_data fd, *fdp = NULL;
11551 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
11552 gbind *new_stmt;
11553 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
11554 gimple_seq cnt_list = NULL, clist = NULL;
11555 gimple_seq oacc_head = NULL, oacc_tail = NULL;
11556 size_t i;
11558 push_gimplify_context ();
11560 if (is_gimple_omp_oacc (ctx->stmt))
11561 oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));
11563 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
11565 block = make_node (BLOCK);
11566 new_stmt = gimple_build_bind (NULL, NULL, block);
11567 /* Replace at gsi right away, so that 'stmt' is no member
11568 of a sequence anymore as we're going to add to a different
11569 one below. */
11570 gsi_replace (gsi_p, new_stmt, true);
11572 /* Move declaration of temporaries in the loop body before we make
11573 it go away. */
11574 omp_for_body = gimple_omp_body (stmt);
11575 if (!gimple_seq_empty_p (omp_for_body)
11576 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
11578 gbind *inner_bind
11579 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
11580 tree vars = gimple_bind_vars (inner_bind);
11581 if (is_gimple_omp_oacc (ctx->stmt))
11582 oacc_privatization_scan_decl_chain (ctx, vars);
11583 gimple_bind_append_vars (new_stmt, vars);
11584 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11585 keep them on the inner_bind and it's block. */
11586 gimple_bind_set_vars (inner_bind, NULL_TREE);
11587 if (gimple_bind_block (inner_bind))
11588 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
11591 if (gimple_omp_for_combined_into_p (stmt))
11593 omp_extract_for_data (stmt, &fd, NULL);
11594 fdp = &fd;
11596 /* We need two temporaries with fd.loop.v type (istart/iend)
11597 and then (fd.collapse - 1) temporaries with the same
11598 type for count2 ... countN-1 vars if not constant. */
11599 size_t count = 2;
11600 tree type = fd.iter_type;
11601 if (fd.collapse > 1
11602 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11603 count += fd.collapse - 1;
11604 size_t count2 = 0;
11605 tree type2 = NULL_TREE;
11606 bool taskreg_for
11607 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
11608 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
11609 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
11610 tree simtc = NULL;
11611 tree clauses = *pc;
11612 if (fd.collapse > 1
11613 && fd.non_rect
11614 && fd.last_nonrect == fd.first_nonrect + 1
11615 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11616 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
11617 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
11619 v = gimple_omp_for_index (stmt, fd.first_nonrect);
11620 type2 = TREE_TYPE (v);
11621 count++;
11622 count2 = 3;
11624 if (taskreg_for)
11625 outerc
11626 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
11627 OMP_CLAUSE__LOOPTEMP_);
11628 if (ctx->simt_stmt)
11629 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
11630 OMP_CLAUSE__LOOPTEMP_);
11631 for (i = 0; i < count + count2; i++)
11633 tree temp;
11634 if (taskreg_for)
11636 gcc_assert (outerc);
11637 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
11638 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
11639 OMP_CLAUSE__LOOPTEMP_);
11641 else
11643 /* If there are 2 adjacent SIMD stmts, one with _simt_
11644 clause, another without, make sure they have the same
11645 decls in _looptemp_ clauses, because the outer stmt
11646 they are combined into will look up just one inner_stmt. */
11647 if (ctx->simt_stmt)
11648 temp = OMP_CLAUSE_DECL (simtc);
11649 else
11650 temp = create_tmp_var (i >= count ? type2 : type);
11651 insert_decl_map (&ctx->outer->cb, temp, temp);
11653 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
11654 OMP_CLAUSE_DECL (*pc) = temp;
11655 pc = &OMP_CLAUSE_CHAIN (*pc);
11656 if (ctx->simt_stmt)
11657 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
11658 OMP_CLAUSE__LOOPTEMP_);
11660 *pc = clauses;
11663 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11664 dlist = NULL;
11665 body = NULL;
11666 tree rclauses
11667 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
11668 OMP_CLAUSE_REDUCTION);
11669 tree rtmp = NULL_TREE;
11670 if (rclauses)
11672 tree type = build_pointer_type (pointer_sized_int_node);
11673 tree temp = create_tmp_var (type);
11674 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
11675 OMP_CLAUSE_DECL (c) = temp;
11676 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
11677 gimple_omp_for_set_clauses (stmt, c);
11678 lower_omp_task_reductions (ctx, OMP_FOR,
11679 gimple_omp_for_clauses (stmt),
11680 &tred_ilist, &tred_dlist);
11681 rclauses = c;
11682 rtmp = make_ssa_name (type);
11683 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
11686 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
11687 ctx);
11689 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
11690 fdp);
11691 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
11692 gimple_omp_for_pre_body (stmt));
11694 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11696 gcall *private_marker = NULL;
11697 if (is_gimple_omp_oacc (ctx->stmt)
11698 && !gimple_seq_empty_p (omp_for_body))
11699 private_marker = lower_oacc_private_marker (ctx);
11701 /* Lower the header expressions. At this point, we can assume that
11702 the header is of the form:
11704 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11706 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11707 using the .omp_data_s mapping, if needed. */
11708 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
11710 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
11711 if (TREE_CODE (*rhs_p) == TREE_VEC)
11713 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11714 TREE_VEC_ELT (*rhs_p, 1)
11715 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11716 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11717 TREE_VEC_ELT (*rhs_p, 2)
11718 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11720 else if (!is_gimple_min_invariant (*rhs_p))
11721 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11722 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11723 recompute_tree_invariant_for_addr_expr (*rhs_p);
11725 rhs_p = gimple_omp_for_final_ptr (stmt, i);
11726 if (TREE_CODE (*rhs_p) == TREE_VEC)
11728 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11729 TREE_VEC_ELT (*rhs_p, 1)
11730 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11731 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11732 TREE_VEC_ELT (*rhs_p, 2)
11733 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11735 else if (!is_gimple_min_invariant (*rhs_p))
11736 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11737 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11738 recompute_tree_invariant_for_addr_expr (*rhs_p);
11740 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
11741 if (!is_gimple_min_invariant (*rhs_p))
11742 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11744 if (rclauses)
11745 gimple_seq_add_seq (&tred_ilist, cnt_list);
11746 else
11747 gimple_seq_add_seq (&body, cnt_list);
11749 /* Once lowered, extract the bounds and clauses. */
11750 omp_extract_for_data (stmt, &fd, NULL);
11752 if (is_gimple_omp_oacc (ctx->stmt)
11753 && !ctx_in_oacc_kernels_region (ctx))
11754 lower_oacc_head_tail (gimple_location (stmt),
11755 gimple_omp_for_clauses (stmt), private_marker,
11756 &oacc_head, &oacc_tail, ctx);
11758 /* Add OpenACC partitioning and reduction markers just before the loop. */
11759 if (oacc_head)
11760 gimple_seq_add_seq (&body, oacc_head);
11762 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
11764 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11765 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11766 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11767 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11769 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
11770 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
11771 OMP_CLAUSE_LINEAR_STEP (c)
11772 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
11773 ctx);
11776 if ((ctx->scan_inclusive || ctx->scan_exclusive)
11777 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11778 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
11779 else
11781 gimple_seq_add_stmt (&body, stmt);
11782 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
11785 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
11786 fd.loop.v));
11788 /* After the loop, add exit clauses. */
11789 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
11791 if (clist)
11793 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
11794 gcall *g = gimple_build_call (fndecl, 0);
11795 gimple_seq_add_stmt (&body, g);
11796 gimple_seq_add_seq (&body, clist);
11797 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
11798 g = gimple_build_call (fndecl, 0);
11799 gimple_seq_add_stmt (&body, g);
11802 if (ctx->cancellable)
11803 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
11805 gimple_seq_add_seq (&body, dlist);
11807 if (rclauses)
11809 gimple_seq_add_seq (&tred_ilist, body);
11810 body = tred_ilist;
11813 body = maybe_catch_exception (body);
11815 /* Region exit marker goes at the end of the loop body. */
11816 gimple *g = gimple_build_omp_return (fd.have_nowait);
11817 gimple_seq_add_stmt (&body, g);
11819 gimple_seq_add_seq (&body, tred_dlist);
11821 maybe_add_implicit_barrier_cancel (ctx, g, &body);
11823 if (rclauses)
11824 OMP_CLAUSE_DECL (rclauses) = rtmp;
11826 /* Add OpenACC joining and reduction markers just after the loop. */
11827 if (oacc_tail)
11828 gimple_seq_add_seq (&body, oacc_tail);
11830 pop_gimplify_context (new_stmt);
11832 gimple_bind_append_vars (new_stmt, ctx->block_vars);
11833 maybe_remove_omp_member_access_dummy_vars (new_stmt);
11834 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
11835 if (BLOCK_VARS (block))
11836 TREE_USED (block) = 1;
11838 gimple_bind_set_body (new_stmt, body);
11839 gimple_omp_set_body (stmt, NULL);
11840 gimple_omp_for_set_pre_body (stmt, NULL);
11843 /* Callback for walk_stmts. Check if the current statement only contains
11844 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11846 static tree
11847 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11848 bool *handled_ops_p,
11849 struct walk_stmt_info *wi)
11851 int *info = (int *) wi->info;
11852 gimple *stmt = gsi_stmt (*gsi_p);
11854 *handled_ops_p = true;
11855 switch (gimple_code (stmt))
11857 WALK_SUBSTMTS;
11859 case GIMPLE_DEBUG:
11860 break;
11861 case GIMPLE_OMP_FOR:
11862 case GIMPLE_OMP_SECTIONS:
11863 *info = *info == 0 ? 1 : -1;
11864 break;
11865 default:
11866 *info = -1;
11867 break;
11869 return NULL;
11872 struct omp_taskcopy_context
11874 /* This field must be at the beginning, as we do "inheritance": Some
11875 callback functions for tree-inline.cc (e.g., omp_copy_decl)
11876 receive a copy_body_data pointer that is up-casted to an
11877 omp_context pointer. */
11878 copy_body_data cb;
11879 omp_context *ctx;
11882 static tree
11883 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11885 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11887 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11888 return create_tmp_var (TREE_TYPE (var));
11890 return var;
11893 static tree
11894 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11896 tree name, new_fields = NULL, type, f;
11898 type = lang_hooks.types.make_type (RECORD_TYPE);
11899 name = DECL_NAME (TYPE_NAME (orig_type));
11900 name = build_decl (gimple_location (tcctx->ctx->stmt),
11901 TYPE_DECL, name, type);
11902 TYPE_NAME (type) = name;
11904 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11906 tree new_f = copy_node (f);
11907 DECL_CONTEXT (new_f) = type;
11908 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11909 TREE_CHAIN (new_f) = new_fields;
11910 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11911 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11912 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11913 &tcctx->cb, NULL);
11914 new_fields = new_f;
11915 tcctx->cb.decl_map->put (f, new_f);
11917 TYPE_FIELDS (type) = nreverse (new_fields);
11918 layout_type (type);
11919 return type;
11922 /* Create task copyfn. */
11924 static void
11925 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
11927 struct function *child_cfun;
11928 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
11929 tree record_type, srecord_type, bind, list;
11930 bool record_needs_remap = false, srecord_needs_remap = false;
11931 splay_tree_node n;
11932 struct omp_taskcopy_context tcctx;
11933 location_t loc = gimple_location (task_stmt);
11934 size_t looptempno = 0;
11936 child_fn = gimple_omp_task_copy_fn (task_stmt);
11937 task_cpyfns.safe_push (task_stmt);
11938 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
11939 gcc_assert (child_cfun->cfg == NULL);
11940 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
11942 /* Reset DECL_CONTEXT on function arguments. */
11943 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
11944 DECL_CONTEXT (t) = child_fn;
11946 /* Populate the function. */
11947 push_gimplify_context ();
11948 push_cfun (child_cfun);
11950 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
11951 TREE_SIDE_EFFECTS (bind) = 1;
11952 list = NULL;
11953 DECL_SAVED_TREE (child_fn) = bind;
11954 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
11956 /* Remap src and dst argument types if needed. */
11957 record_type = ctx->record_type;
11958 srecord_type = ctx->srecord_type;
11959 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
11960 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11962 record_needs_remap = true;
11963 break;
11965 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
11966 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11968 srecord_needs_remap = true;
11969 break;
11972 if (record_needs_remap || srecord_needs_remap)
11974 memset (&tcctx, '\0', sizeof (tcctx));
11975 tcctx.cb.src_fn = ctx->cb.src_fn;
11976 tcctx.cb.dst_fn = child_fn;
11977 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
11978 gcc_checking_assert (tcctx.cb.src_node);
11979 tcctx.cb.dst_node = tcctx.cb.src_node;
11980 tcctx.cb.src_cfun = ctx->cb.src_cfun;
11981 tcctx.cb.copy_decl = task_copyfn_copy_decl;
11982 tcctx.cb.eh_lp_nr = 0;
11983 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
11984 tcctx.cb.decl_map = new hash_map<tree, tree>;
11985 tcctx.ctx = ctx;
11987 if (record_needs_remap)
11988 record_type = task_copyfn_remap_type (&tcctx, record_type);
11989 if (srecord_needs_remap)
11990 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
11992 else
11993 tcctx.cb.decl_map = NULL;
11995 arg = DECL_ARGUMENTS (child_fn);
11996 TREE_TYPE (arg) = build_pointer_type (record_type);
11997 sarg = DECL_CHAIN (arg);
11998 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
12000 /* First pass: initialize temporaries used in record_type and srecord_type
12001 sizes and field offsets. */
12002 if (tcctx.cb.decl_map)
12003 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12004 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12006 tree *p;
12008 decl = OMP_CLAUSE_DECL (c);
12009 p = tcctx.cb.decl_map->get (decl);
12010 if (p == NULL)
12011 continue;
12012 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12013 sf = (tree) n->value;
12014 sf = *tcctx.cb.decl_map->get (sf);
12015 src = build_simple_mem_ref_loc (loc, sarg);
12016 src = omp_build_component_ref (src, sf);
12017 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
12018 append_to_statement_list (t, &list);
12021 /* Second pass: copy shared var pointers and copy construct non-VLA
12022 firstprivate vars. */
12023 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12024 switch (OMP_CLAUSE_CODE (c))
12026 splay_tree_key key;
12027 case OMP_CLAUSE_SHARED:
12028 decl = OMP_CLAUSE_DECL (c);
12029 key = (splay_tree_key) decl;
12030 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
12031 key = (splay_tree_key) &DECL_UID (decl);
12032 n = splay_tree_lookup (ctx->field_map, key);
12033 if (n == NULL)
12034 break;
12035 f = (tree) n->value;
12036 if (tcctx.cb.decl_map)
12037 f = *tcctx.cb.decl_map->get (f);
12038 n = splay_tree_lookup (ctx->sfield_map, key);
12039 sf = (tree) n->value;
12040 if (tcctx.cb.decl_map)
12041 sf = *tcctx.cb.decl_map->get (sf);
12042 src = build_simple_mem_ref_loc (loc, sarg);
12043 src = omp_build_component_ref (src, sf);
12044 dst = build_simple_mem_ref_loc (loc, arg);
12045 dst = omp_build_component_ref (dst, f);
12046 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12047 append_to_statement_list (t, &list);
12048 break;
12049 case OMP_CLAUSE_REDUCTION:
12050 case OMP_CLAUSE_IN_REDUCTION:
12051 decl = OMP_CLAUSE_DECL (c);
12052 if (TREE_CODE (decl) == MEM_REF)
12054 decl = TREE_OPERAND (decl, 0);
12055 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12056 decl = TREE_OPERAND (decl, 0);
12057 if (TREE_CODE (decl) == INDIRECT_REF
12058 || TREE_CODE (decl) == ADDR_EXPR)
12059 decl = TREE_OPERAND (decl, 0);
12061 key = (splay_tree_key) decl;
12062 n = splay_tree_lookup (ctx->field_map, key);
12063 if (n == NULL)
12064 break;
12065 f = (tree) n->value;
12066 if (tcctx.cb.decl_map)
12067 f = *tcctx.cb.decl_map->get (f);
12068 n = splay_tree_lookup (ctx->sfield_map, key);
12069 sf = (tree) n->value;
12070 if (tcctx.cb.decl_map)
12071 sf = *tcctx.cb.decl_map->get (sf);
12072 src = build_simple_mem_ref_loc (loc, sarg);
12073 src = omp_build_component_ref (src, sf);
12074 if (decl != OMP_CLAUSE_DECL (c)
12075 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12076 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12077 src = build_simple_mem_ref_loc (loc, src);
12078 dst = build_simple_mem_ref_loc (loc, arg);
12079 dst = omp_build_component_ref (dst, f);
12080 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12081 append_to_statement_list (t, &list);
12082 break;
12083 case OMP_CLAUSE__LOOPTEMP_:
12084 /* Fields for first two _looptemp_ clauses are initialized by
12085 GOMP_taskloop*, the rest are handled like firstprivate. */
12086 if (looptempno < 2)
12088 looptempno++;
12089 break;
12091 /* FALLTHRU */
12092 case OMP_CLAUSE__REDUCTEMP_:
12093 case OMP_CLAUSE_FIRSTPRIVATE:
12094 decl = OMP_CLAUSE_DECL (c);
12095 if (is_variable_sized (decl))
12096 break;
12097 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12098 if (n == NULL)
12099 break;
12100 f = (tree) n->value;
12101 if (tcctx.cb.decl_map)
12102 f = *tcctx.cb.decl_map->get (f);
12103 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12104 if (n != NULL)
12106 sf = (tree) n->value;
12107 if (tcctx.cb.decl_map)
12108 sf = *tcctx.cb.decl_map->get (sf);
12109 src = build_simple_mem_ref_loc (loc, sarg);
12110 src = omp_build_component_ref (src, sf);
12111 if (use_pointer_for_field (decl, NULL)
12112 || omp_privatize_by_reference (decl))
12113 src = build_simple_mem_ref_loc (loc, src);
12115 else
12116 src = decl;
12117 dst = build_simple_mem_ref_loc (loc, arg);
12118 dst = omp_build_component_ref (dst, f);
12119 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
12120 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12121 else
12123 if (ctx->allocate_map)
12124 if (tree *allocatorp = ctx->allocate_map->get (decl))
12126 tree allocator = *allocatorp;
12127 HOST_WIDE_INT ialign = 0;
12128 if (TREE_CODE (allocator) == TREE_LIST)
12130 ialign = tree_to_uhwi (TREE_VALUE (allocator));
12131 allocator = TREE_PURPOSE (allocator);
12133 if (TREE_CODE (allocator) != INTEGER_CST)
12135 n = splay_tree_lookup (ctx->sfield_map,
12136 (splay_tree_key) allocator);
12137 allocator = (tree) n->value;
12138 if (tcctx.cb.decl_map)
12139 allocator = *tcctx.cb.decl_map->get (allocator);
12140 tree a = build_simple_mem_ref_loc (loc, sarg);
12141 allocator = omp_build_component_ref (a, allocator);
12143 allocator = fold_convert (pointer_sized_int_node, allocator);
12144 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
12145 tree align = build_int_cst (size_type_node,
12146 MAX (ialign,
12147 DECL_ALIGN_UNIT (decl)));
12148 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
12149 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
12150 allocator);
12151 ptr = fold_convert (TREE_TYPE (dst), ptr);
12152 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
12153 append_to_statement_list (t, &list);
12154 dst = build_simple_mem_ref_loc (loc, dst);
12156 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12158 append_to_statement_list (t, &list);
12159 break;
12160 case OMP_CLAUSE_PRIVATE:
12161 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
12162 break;
12163 decl = OMP_CLAUSE_DECL (c);
12164 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12165 f = (tree) n->value;
12166 if (tcctx.cb.decl_map)
12167 f = *tcctx.cb.decl_map->get (f);
12168 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12169 if (n != NULL)
12171 sf = (tree) n->value;
12172 if (tcctx.cb.decl_map)
12173 sf = *tcctx.cb.decl_map->get (sf);
12174 src = build_simple_mem_ref_loc (loc, sarg);
12175 src = omp_build_component_ref (src, sf);
12176 if (use_pointer_for_field (decl, NULL))
12177 src = build_simple_mem_ref_loc (loc, src);
12179 else
12180 src = decl;
12181 dst = build_simple_mem_ref_loc (loc, arg);
12182 dst = omp_build_component_ref (dst, f);
12183 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12184 append_to_statement_list (t, &list);
12185 break;
12186 default:
12187 break;
12190 /* Last pass: handle VLA firstprivates. */
12191 if (tcctx.cb.decl_map)
12192 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12193 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12195 tree ind, ptr, df;
12197 decl = OMP_CLAUSE_DECL (c);
12198 if (!is_variable_sized (decl))
12199 continue;
12200 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12201 if (n == NULL)
12202 continue;
12203 f = (tree) n->value;
12204 f = *tcctx.cb.decl_map->get (f);
12205 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
12206 ind = DECL_VALUE_EXPR (decl);
12207 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
12208 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
12209 n = splay_tree_lookup (ctx->sfield_map,
12210 (splay_tree_key) TREE_OPERAND (ind, 0));
12211 sf = (tree) n->value;
12212 sf = *tcctx.cb.decl_map->get (sf);
12213 src = build_simple_mem_ref_loc (loc, sarg);
12214 src = omp_build_component_ref (src, sf);
12215 src = build_simple_mem_ref_loc (loc, src);
12216 dst = build_simple_mem_ref_loc (loc, arg);
12217 dst = omp_build_component_ref (dst, f);
12218 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12219 append_to_statement_list (t, &list);
12220 n = splay_tree_lookup (ctx->field_map,
12221 (splay_tree_key) TREE_OPERAND (ind, 0));
12222 df = (tree) n->value;
12223 df = *tcctx.cb.decl_map->get (df);
12224 ptr = build_simple_mem_ref_loc (loc, arg);
12225 ptr = omp_build_component_ref (ptr, df);
12226 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
12227 build_fold_addr_expr_loc (loc, dst));
12228 append_to_statement_list (t, &list);
12231 t = build1 (RETURN_EXPR, void_type_node, NULL);
12232 append_to_statement_list (t, &list);
12234 if (tcctx.cb.decl_map)
12235 delete tcctx.cb.decl_map;
12236 pop_gimplify_context (NULL);
12237 BIND_EXPR_BODY (bind) = list;
12238 pop_cfun ();
12241 static void
12242 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
12244 tree c, clauses;
12245 gimple *g;
12246 size_t cnt[5] = { 0, 0, 0, 0, 0 }, idx = 2, i;
12248 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
12249 gcc_assert (clauses);
12250 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12251 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
12252 switch (OMP_CLAUSE_DEPEND_KIND (c))
12254 case OMP_CLAUSE_DEPEND_LAST:
12255 /* Lowering already done at gimplification. */
12256 return;
12257 case OMP_CLAUSE_DEPEND_IN:
12258 cnt[2]++;
12259 break;
12260 case OMP_CLAUSE_DEPEND_OUT:
12261 case OMP_CLAUSE_DEPEND_INOUT:
12262 cnt[0]++;
12263 break;
12264 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12265 cnt[1]++;
12266 break;
12267 case OMP_CLAUSE_DEPEND_DEPOBJ:
12268 cnt[3]++;
12269 break;
12270 case OMP_CLAUSE_DEPEND_INOUTSET:
12271 cnt[4]++;
12272 break;
12273 default:
12274 gcc_unreachable ();
12276 if (cnt[1] || cnt[3] || cnt[4])
12277 idx = 5;
12278 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3] + cnt[4];
12279 size_t inoutidx = total + idx;
12280 tree type = build_array_type_nelts (ptr_type_node, total + idx + 2 * cnt[4]);
12281 tree array = create_tmp_var (type);
12282 TREE_ADDRESSABLE (array) = 1;
12283 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12284 NULL_TREE);
12285 if (idx == 5)
12287 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12288 gimple_seq_add_stmt (iseq, g);
12289 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12290 NULL_TREE);
12292 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12293 gimple_seq_add_stmt (iseq, g);
12294 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12296 r = build4 (ARRAY_REF, ptr_type_node, array,
12297 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12298 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12299 gimple_seq_add_stmt (iseq, g);
12301 for (i = 0; i < 5; i++)
12303 if (cnt[i] == 0)
12304 continue;
12305 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12306 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12307 continue;
12308 else
12310 switch (OMP_CLAUSE_DEPEND_KIND (c))
12312 case OMP_CLAUSE_DEPEND_IN:
12313 if (i != 2)
12314 continue;
12315 break;
12316 case OMP_CLAUSE_DEPEND_OUT:
12317 case OMP_CLAUSE_DEPEND_INOUT:
12318 if (i != 0)
12319 continue;
12320 break;
12321 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12322 if (i != 1)
12323 continue;
12324 break;
12325 case OMP_CLAUSE_DEPEND_DEPOBJ:
12326 if (i != 3)
12327 continue;
12328 break;
12329 case OMP_CLAUSE_DEPEND_INOUTSET:
12330 if (i != 4)
12331 continue;
12332 break;
12333 default:
12334 gcc_unreachable ();
12336 tree t = OMP_CLAUSE_DECL (c);
12337 if (i == 4)
12339 t = build4 (ARRAY_REF, ptr_type_node, array,
12340 size_int (inoutidx), NULL_TREE, NULL_TREE);
12341 t = build_fold_addr_expr (t);
12342 inoutidx += 2;
12344 t = fold_convert (ptr_type_node, t);
12345 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12346 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12347 NULL_TREE, NULL_TREE);
12348 g = gimple_build_assign (r, t);
12349 gimple_seq_add_stmt (iseq, g);
12352 if (cnt[4])
12353 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12354 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12355 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_INOUTSET)
12357 tree t = OMP_CLAUSE_DECL (c);
12358 t = fold_convert (ptr_type_node, t);
12359 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12360 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12361 NULL_TREE, NULL_TREE);
12362 g = gimple_build_assign (r, t);
12363 gimple_seq_add_stmt (iseq, g);
12364 t = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
12365 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12366 NULL_TREE, NULL_TREE);
12367 g = gimple_build_assign (r, t);
12368 gimple_seq_add_stmt (iseq, g);
12371 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12372 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12373 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12374 OMP_CLAUSE_CHAIN (c) = *pclauses;
12375 *pclauses = c;
12376 tree clobber = build_clobber (type);
12377 g = gimple_build_assign (array, clobber);
12378 gimple_seq_add_stmt (oseq, g);
12381 /* Lower the OpenMP parallel or task directive in the current statement
12382 in GSI_P. CTX holds context information for the directive. */
12384 static void
12385 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12387 tree clauses;
12388 tree child_fn, t;
12389 gimple *stmt = gsi_stmt (*gsi_p);
12390 gbind *par_bind, *bind, *dep_bind = NULL;
12391 gimple_seq par_body;
12392 location_t loc = gimple_location (stmt);
12394 clauses = gimple_omp_taskreg_clauses (stmt);
12395 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12396 && gimple_omp_task_taskwait_p (stmt))
12398 par_bind = NULL;
12399 par_body = NULL;
12401 else
12403 par_bind
12404 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
12405 par_body = gimple_bind_body (par_bind);
12407 child_fn = ctx->cb.dst_fn;
12408 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
12409 && !gimple_omp_parallel_combined_p (stmt))
12411 struct walk_stmt_info wi;
12412 int ws_num = 0;
12414 memset (&wi, 0, sizeof (wi));
12415 wi.info = &ws_num;
12416 wi.val_only = true;
12417 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12418 if (ws_num == 1)
12419 gimple_omp_parallel_set_combined_p (stmt, true);
12421 gimple_seq dep_ilist = NULL;
12422 gimple_seq dep_olist = NULL;
12423 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12424 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12426 push_gimplify_context ();
12427 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12428 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
12429 &dep_ilist, &dep_olist);
12432 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12433 && gimple_omp_task_taskwait_p (stmt))
12435 if (dep_bind)
12437 gsi_replace (gsi_p, dep_bind, true);
12438 gimple_bind_add_seq (dep_bind, dep_ilist);
12439 gimple_bind_add_stmt (dep_bind, stmt);
12440 gimple_bind_add_seq (dep_bind, dep_olist);
12441 pop_gimplify_context (dep_bind);
12443 return;
12446 if (ctx->srecord_type)
12447 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
12449 gimple_seq tskred_ilist = NULL;
12450 gimple_seq tskred_olist = NULL;
12451 if ((is_task_ctx (ctx)
12452 && gimple_omp_task_taskloop_p (ctx->stmt)
12453 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
12454 OMP_CLAUSE_REDUCTION))
12455 || (is_parallel_ctx (ctx)
12456 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
12457 OMP_CLAUSE__REDUCTEMP_)))
12459 if (dep_bind == NULL)
12461 push_gimplify_context ();
12462 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12464 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
12465 : OMP_PARALLEL,
12466 gimple_omp_taskreg_clauses (ctx->stmt),
12467 &tskred_ilist, &tskred_olist);
12470 push_gimplify_context ();
12472 gimple_seq par_olist = NULL;
12473 gimple_seq par_ilist = NULL;
12474 gimple_seq par_rlist = NULL;
12475 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
12476 lower_omp (&par_body, ctx);
12477 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
12478 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
12480 /* Declare all the variables created by mapping and the variables
12481 declared in the scope of the parallel body. */
12482 record_vars_into (ctx->block_vars, child_fn);
12483 maybe_remove_omp_member_access_dummy_vars (par_bind);
12484 record_vars_into (gimple_bind_vars (par_bind), child_fn);
12486 if (ctx->record_type)
12488 ctx->sender_decl
12489 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12490 : ctx->record_type, ".omp_data_o");
12491 DECL_NAMELESS (ctx->sender_decl) = 1;
12492 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12493 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
12496 gimple_seq olist = NULL;
12497 gimple_seq ilist = NULL;
12498 lower_send_clauses (clauses, &ilist, &olist, ctx);
12499 lower_send_shared_vars (&ilist, &olist, ctx);
12501 if (ctx->record_type)
12503 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12504 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12505 clobber));
12508 /* Once all the expansions are done, sequence all the different
12509 fragments inside gimple_omp_body. */
12511 gimple_seq new_body = NULL;
12513 if (ctx->record_type)
12515 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12516 /* fixup_child_record_type might have changed receiver_decl's type. */
12517 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12518 gimple_seq_add_stmt (&new_body,
12519 gimple_build_assign (ctx->receiver_decl, t));
12522 gimple_seq_add_seq (&new_body, par_ilist);
12523 gimple_seq_add_seq (&new_body, par_body);
12524 gimple_seq_add_seq (&new_body, par_rlist);
12525 if (ctx->cancellable)
12526 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
12527 gimple_seq_add_seq (&new_body, par_olist);
12528 new_body = maybe_catch_exception (new_body);
12529 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
12530 gimple_seq_add_stmt (&new_body,
12531 gimple_build_omp_continue (integer_zero_node,
12532 integer_zero_node));
12533 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12534 gimple_omp_set_body (stmt, new_body);
12536 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
12537 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12538 else
12539 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
12540 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12541 gimple_bind_add_seq (bind, ilist);
12542 gimple_bind_add_stmt (bind, stmt);
12543 gimple_bind_add_seq (bind, olist);
12545 pop_gimplify_context (NULL);
12547 if (dep_bind)
12549 gimple_bind_add_seq (dep_bind, dep_ilist);
12550 gimple_bind_add_seq (dep_bind, tskred_ilist);
12551 gimple_bind_add_stmt (dep_bind, bind);
12552 gimple_bind_add_seq (dep_bind, tskred_olist);
12553 gimple_bind_add_seq (dep_bind, dep_olist);
12554 pop_gimplify_context (dep_bind);
12558 /* Lower the GIMPLE_OMP_TARGET in the current statement
12559 in GSI_P. CTX holds context information for the directive. */
12561 static void
12562 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12564 tree clauses;
12565 tree child_fn, t, c;
12566 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12567 gbind *tgt_bind, *bind, *dep_bind = NULL;
12568 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12569 location_t loc = gimple_location (stmt);
12570 bool offloaded, data_region;
12571 unsigned int map_cnt = 0;
12572 tree in_reduction_clauses = NULL_TREE;
12574 offloaded = is_gimple_omp_offloaded (stmt);
12575 switch (gimple_omp_target_kind (stmt))
12577 case GF_OMP_TARGET_KIND_REGION:
12578 tree *p, *q;
12579 q = &in_reduction_clauses;
12580 for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
12581 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12583 *q = *p;
12584 q = &OMP_CLAUSE_CHAIN (*q);
12585 *p = OMP_CLAUSE_CHAIN (*p);
12587 else
12588 p = &OMP_CLAUSE_CHAIN (*p);
12589 *q = NULL_TREE;
12590 *p = in_reduction_clauses;
12591 /* FALLTHRU */
12592 case GF_OMP_TARGET_KIND_UPDATE:
12593 case GF_OMP_TARGET_KIND_ENTER_DATA:
12594 case GF_OMP_TARGET_KIND_EXIT_DATA:
12595 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12596 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12597 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12598 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12599 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12600 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12601 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12602 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12603 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12604 data_region = false;
12605 break;
12606 case GF_OMP_TARGET_KIND_DATA:
12607 case GF_OMP_TARGET_KIND_OACC_DATA:
12608 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12609 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12610 data_region = true;
12611 break;
12612 default:
12613 gcc_unreachable ();
12616 /* Ensure that requires map is written via output_offload_tables, even if only
12617 'target (enter/exit) data' is used in the translation unit. */
12618 if (ENABLE_OFFLOADING && (omp_requires_mask & OMP_REQUIRES_TARGET_USED))
12619 g->have_offload = true;
12621 clauses = gimple_omp_target_clauses (stmt);
12623 gimple_seq dep_ilist = NULL;
12624 gimple_seq dep_olist = NULL;
12625 bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
12626 if (has_depend || in_reduction_clauses)
12628 push_gimplify_context ();
12629 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12630 if (has_depend)
12631 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12632 &dep_ilist, &dep_olist);
12633 if (in_reduction_clauses)
12634 lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
12635 ctx, NULL);
12638 tgt_bind = NULL;
12639 tgt_body = NULL;
12640 if (offloaded)
12642 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12643 tgt_body = gimple_bind_body (tgt_bind);
12645 else if (data_region)
12646 tgt_body = gimple_omp_body (stmt);
12647 child_fn = ctx->cb.dst_fn;
12649 push_gimplify_context ();
12650 fplist = NULL;
12652 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12653 switch (OMP_CLAUSE_CODE (c))
12655 tree var, x;
12657 default:
12658 break;
12659 case OMP_CLAUSE_MAP:
12660 #if CHECKING_P
12661 /* First check what we're prepared to handle in the following. */
12662 switch (OMP_CLAUSE_MAP_KIND (c))
12664 case GOMP_MAP_ALLOC:
12665 case GOMP_MAP_TO:
12666 case GOMP_MAP_FROM:
12667 case GOMP_MAP_TOFROM:
12668 case GOMP_MAP_POINTER:
12669 case GOMP_MAP_TO_PSET:
12670 case GOMP_MAP_DELETE:
12671 case GOMP_MAP_RELEASE:
12672 case GOMP_MAP_ALWAYS_TO:
12673 case GOMP_MAP_ALWAYS_FROM:
12674 case GOMP_MAP_ALWAYS_TOFROM:
12675 case GOMP_MAP_FORCE_PRESENT:
12676 case GOMP_MAP_ALWAYS_PRESENT_FROM:
12677 case GOMP_MAP_ALWAYS_PRESENT_TO:
12678 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
12680 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12681 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12682 case GOMP_MAP_STRUCT:
12683 case GOMP_MAP_ALWAYS_POINTER:
12684 case GOMP_MAP_ATTACH:
12685 case GOMP_MAP_DETACH:
12686 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
12687 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
12688 break;
12689 case GOMP_MAP_IF_PRESENT:
12690 case GOMP_MAP_FORCE_ALLOC:
12691 case GOMP_MAP_FORCE_TO:
12692 case GOMP_MAP_FORCE_FROM:
12693 case GOMP_MAP_FORCE_TOFROM:
12694 case GOMP_MAP_FORCE_DEVICEPTR:
12695 case GOMP_MAP_DEVICE_RESIDENT:
12696 case GOMP_MAP_LINK:
12697 case GOMP_MAP_FORCE_DETACH:
12698 gcc_assert (is_gimple_omp_oacc (stmt));
12699 break;
12700 default:
12701 gcc_unreachable ();
12703 #endif
12704 /* FALLTHRU */
12705 case OMP_CLAUSE_TO:
12706 case OMP_CLAUSE_FROM:
12707 oacc_firstprivate:
12708 var = OMP_CLAUSE_DECL (c);
12709 if (!DECL_P (var))
12711 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12712 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12713 && (OMP_CLAUSE_MAP_KIND (c)
12714 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12715 map_cnt++;
12716 continue;
12719 if (DECL_SIZE (var)
12720 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12722 tree var2 = DECL_VALUE_EXPR (var);
12723 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12724 var2 = TREE_OPERAND (var2, 0);
12725 gcc_assert (DECL_P (var2));
12726 var = var2;
12729 if (offloaded
12730 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12731 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12732 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12734 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12736 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12737 && varpool_node::get_create (var)->offloadable)
12738 continue;
12740 tree type = build_pointer_type (TREE_TYPE (var));
12741 tree new_var = lookup_decl (var, ctx);
12742 x = create_tmp_var_raw (type, get_name (new_var));
12743 gimple_add_tmp_var (x);
12744 x = build_simple_mem_ref (x);
12745 SET_DECL_VALUE_EXPR (new_var, x);
12746 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12748 continue;
12751 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12752 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12753 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12754 && is_omp_target (stmt))
12756 gcc_assert (maybe_lookup_field (c, ctx));
12757 map_cnt++;
12758 continue;
12761 if (!maybe_lookup_field (var, ctx))
12762 continue;
12764 /* Don't remap compute constructs' reduction variables, because the
12765 intermediate result must be local to each gang. */
12766 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12767 && is_gimple_omp_oacc (ctx->stmt)
12768 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12770 x = build_receiver_ref (var, true, ctx);
12771 tree new_var = lookup_decl (var, ctx);
12773 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12774 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12775 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12776 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12777 x = build_simple_mem_ref (x);
12778 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12780 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12781 if (omp_privatize_by_reference (new_var)
12782 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12783 || DECL_BY_REFERENCE (var)))
12785 /* Create a local object to hold the instance
12786 value. */
12787 tree type = TREE_TYPE (TREE_TYPE (new_var));
12788 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12789 tree inst = create_tmp_var (type, id);
12790 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12791 x = build_fold_addr_expr (inst);
12793 gimplify_assign (new_var, x, &fplist);
12795 else if (DECL_P (new_var))
12797 SET_DECL_VALUE_EXPR (new_var, x);
12798 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12800 else
12801 gcc_unreachable ();
12803 map_cnt++;
12804 break;
12806 case OMP_CLAUSE_FIRSTPRIVATE:
12807 omp_firstprivate_recv:
12808 gcc_checking_assert (offloaded);
12809 if (is_gimple_omp_oacc (ctx->stmt))
12811 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12812 gcc_checking_assert (!is_oacc_kernels (ctx));
12813 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12814 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12816 goto oacc_firstprivate;
12818 map_cnt++;
12819 var = OMP_CLAUSE_DECL (c);
12820 if (!omp_privatize_by_reference (var)
12821 && !is_gimple_reg_type (TREE_TYPE (var)))
12823 tree new_var = lookup_decl (var, ctx);
12824 if (is_variable_sized (var))
12826 tree pvar = DECL_VALUE_EXPR (var);
12827 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12828 pvar = TREE_OPERAND (pvar, 0);
12829 gcc_assert (DECL_P (pvar));
12830 tree new_pvar = lookup_decl (pvar, ctx);
12831 x = build_fold_indirect_ref (new_pvar);
12832 TREE_THIS_NOTRAP (x) = 1;
12834 else
12835 x = build_receiver_ref (var, true, ctx);
12836 SET_DECL_VALUE_EXPR (new_var, x);
12837 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12839 /* Fortran array descriptors: firstprivate of data + attach. */
12840 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
12841 && lang_hooks.decls.omp_array_data (var, true))
12842 map_cnt += 2;
12843 break;
12845 case OMP_CLAUSE_PRIVATE:
12846 gcc_checking_assert (offloaded);
12847 if (is_gimple_omp_oacc (ctx->stmt))
12849 /* No 'private' clauses on OpenACC 'kernels'. */
12850 gcc_checking_assert (!is_oacc_kernels (ctx));
12851 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12852 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12854 break;
12856 var = OMP_CLAUSE_DECL (c);
12857 if (is_variable_sized (var))
12859 tree new_var = lookup_decl (var, ctx);
12860 tree pvar = DECL_VALUE_EXPR (var);
12861 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12862 pvar = TREE_OPERAND (pvar, 0);
12863 gcc_assert (DECL_P (pvar));
12864 tree new_pvar = lookup_decl (pvar, ctx);
12865 x = build_fold_indirect_ref (new_pvar);
12866 TREE_THIS_NOTRAP (x) = 1;
12867 SET_DECL_VALUE_EXPR (new_var, x);
12868 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12870 break;
12872 case OMP_CLAUSE_USE_DEVICE_PTR:
12873 case OMP_CLAUSE_USE_DEVICE_ADDR:
12874 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12875 case OMP_CLAUSE_IS_DEVICE_PTR:
12876 var = OMP_CLAUSE_DECL (c);
12877 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
12879 while (TREE_CODE (var) == INDIRECT_REF
12880 || TREE_CODE (var) == ARRAY_REF)
12881 var = TREE_OPERAND (var, 0);
12882 if (lang_hooks.decls.omp_array_data (var, true))
12883 goto omp_firstprivate_recv;
12885 map_cnt++;
12886 if (is_variable_sized (var))
12888 tree new_var = lookup_decl (var, ctx);
12889 tree pvar = DECL_VALUE_EXPR (var);
12890 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12891 pvar = TREE_OPERAND (pvar, 0);
12892 gcc_assert (DECL_P (pvar));
12893 tree new_pvar = lookup_decl (pvar, ctx);
12894 x = build_fold_indirect_ref (new_pvar);
12895 TREE_THIS_NOTRAP (x) = 1;
12896 SET_DECL_VALUE_EXPR (new_var, x);
12897 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12899 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12900 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
12901 && !omp_privatize_by_reference (var)
12902 && !omp_is_allocatable_or_ptr (var)
12903 && !lang_hooks.decls.omp_array_data (var, true))
12904 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12906 tree new_var = lookup_decl (var, ctx);
12907 tree type = build_pointer_type (TREE_TYPE (var));
12908 x = create_tmp_var_raw (type, get_name (new_var));
12909 gimple_add_tmp_var (x);
12910 x = build_simple_mem_ref (x);
12911 SET_DECL_VALUE_EXPR (new_var, x);
12912 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12914 else
12916 tree new_var = lookup_decl (var, ctx);
12917 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12918 gimple_add_tmp_var (x);
12919 SET_DECL_VALUE_EXPR (new_var, x);
12920 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12922 break;
12925 if (offloaded)
12927 target_nesting_level++;
12928 lower_omp (&tgt_body, ctx);
12929 target_nesting_level--;
12931 else if (data_region)
12932 lower_omp (&tgt_body, ctx);
12934 if (offloaded)
12936 /* Declare all the variables created by mapping and the variables
12937 declared in the scope of the target body. */
12938 record_vars_into (ctx->block_vars, child_fn);
12939 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
12940 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
12943 olist = NULL;
12944 ilist = NULL;
12945 if (ctx->record_type)
12947 ctx->sender_decl
12948 = create_tmp_var (ctx->record_type, ".omp_data_arr");
12949 DECL_NAMELESS (ctx->sender_decl) = 1;
12950 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12951 t = make_tree_vec (3);
12952 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
12953 TREE_VEC_ELT (t, 1)
12954 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
12955 ".omp_data_sizes");
12956 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
12957 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
12958 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
12959 tree tkind_type = short_unsigned_type_node;
12960 int talign_shift = 8;
12961 TREE_VEC_ELT (t, 2)
12962 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
12963 ".omp_data_kinds");
12964 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
12965 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
12966 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
12967 gimple_omp_target_set_data_arg (stmt, t);
12969 vec<constructor_elt, va_gc> *vsize;
12970 vec<constructor_elt, va_gc> *vkind;
12971 vec_alloc (vsize, map_cnt);
12972 vec_alloc (vkind, map_cnt);
12973 unsigned int map_idx = 0;
12975 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12976 switch (OMP_CLAUSE_CODE (c))
12978 tree ovar, nc, s, purpose, var, x, type;
12979 unsigned int talign;
12981 default:
12982 break;
12984 case OMP_CLAUSE_MAP:
12985 case OMP_CLAUSE_TO:
12986 case OMP_CLAUSE_FROM:
12987 oacc_firstprivate_map:
12988 nc = c;
12989 ovar = OMP_CLAUSE_DECL (c);
12990 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12991 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12992 || (OMP_CLAUSE_MAP_KIND (c)
12993 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12994 break;
12995 if (!DECL_P (ovar))
12997 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12998 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
13000 nc = OMP_CLAUSE_CHAIN (c);
13001 gcc_checking_assert (OMP_CLAUSE_DECL (nc)
13002 == get_base_address (ovar));
13003 ovar = OMP_CLAUSE_DECL (nc);
13005 else
13007 tree x = build_sender_ref (ovar, ctx);
13008 tree v = ovar;
13009 if (in_reduction_clauses
13010 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13011 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13013 v = unshare_expr (v);
13014 tree *p = &v;
13015 while (handled_component_p (*p)
13016 || TREE_CODE (*p) == INDIRECT_REF
13017 || TREE_CODE (*p) == ADDR_EXPR
13018 || TREE_CODE (*p) == MEM_REF
13019 || TREE_CODE (*p) == NON_LVALUE_EXPR)
13020 p = &TREE_OPERAND (*p, 0);
13021 tree d = *p;
13022 if (is_variable_sized (d))
13024 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13025 d = DECL_VALUE_EXPR (d);
13026 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13027 d = TREE_OPERAND (d, 0);
13028 gcc_assert (DECL_P (d));
13030 splay_tree_key key
13031 = (splay_tree_key) &DECL_CONTEXT (d);
13032 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13033 key)->value;
13034 if (d == *p)
13035 *p = nd;
13036 else
13037 *p = build_fold_indirect_ref (nd);
13039 v = build_fold_addr_expr_with_type (v, ptr_type_node);
13040 gimplify_assign (x, v, &ilist);
13041 nc = NULL_TREE;
13044 else
13046 if (DECL_SIZE (ovar)
13047 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
13049 tree ovar2 = DECL_VALUE_EXPR (ovar);
13050 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
13051 ovar2 = TREE_OPERAND (ovar2, 0);
13052 gcc_assert (DECL_P (ovar2));
13053 ovar = ovar2;
13055 if (!maybe_lookup_field (ovar, ctx)
13056 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13057 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13058 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
13059 continue;
13062 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
13063 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
13064 talign = DECL_ALIGN_UNIT (ovar);
13066 var = NULL_TREE;
13067 if (nc)
13069 if (in_reduction_clauses
13070 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13071 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13073 tree d = ovar;
13074 if (is_variable_sized (d))
13076 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13077 d = DECL_VALUE_EXPR (d);
13078 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13079 d = TREE_OPERAND (d, 0);
13080 gcc_assert (DECL_P (d));
13082 splay_tree_key key
13083 = (splay_tree_key) &DECL_CONTEXT (d);
13084 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13085 key)->value;
13086 if (d == ovar)
13087 var = nd;
13088 else
13089 var = build_fold_indirect_ref (nd);
13091 else
13092 var = lookup_decl_in_outer_ctx (ovar, ctx);
13094 if (nc
13095 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13096 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13097 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
13098 && is_omp_target (stmt))
13100 x = build_sender_ref (c, ctx);
13101 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
13103 else if (nc)
13105 x = build_sender_ref (ovar, ctx);
13107 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13108 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
13109 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
13110 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
13112 gcc_assert (offloaded);
13113 tree avar
13114 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
13115 mark_addressable (avar);
13116 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
13117 talign = DECL_ALIGN_UNIT (avar);
13118 avar = build_fold_addr_expr (avar);
13119 gimplify_assign (x, avar, &ilist);
13121 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13123 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
13124 if (!omp_privatize_by_reference (var))
13126 if (is_gimple_reg (var)
13127 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13128 suppress_warning (var);
13129 var = build_fold_addr_expr (var);
13131 else
13132 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13133 gimplify_assign (x, var, &ilist);
13135 else if (is_gimple_reg (var))
13137 gcc_assert (offloaded);
13138 tree avar = create_tmp_var (TREE_TYPE (var));
13139 mark_addressable (avar);
13140 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
13141 if (GOMP_MAP_COPY_TO_P (map_kind)
13142 || map_kind == GOMP_MAP_POINTER
13143 || map_kind == GOMP_MAP_TO_PSET
13144 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13146 /* If we need to initialize a temporary
13147 with VAR because it is not addressable, and
13148 the variable hasn't been initialized yet, then
13149 we'll get a warning for the store to avar.
13150 Don't warn in that case, the mapping might
13151 be implicit. */
13152 suppress_warning (var, OPT_Wuninitialized);
13153 gimplify_assign (avar, var, &ilist);
13155 avar = build_fold_addr_expr (avar);
13156 gimplify_assign (x, avar, &ilist);
13157 if ((GOMP_MAP_COPY_FROM_P (map_kind)
13158 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13159 && !TYPE_READONLY (TREE_TYPE (var)))
13161 x = unshare_expr (x);
13162 x = build_simple_mem_ref (x);
13163 gimplify_assign (var, x, &olist);
13166 else
13168 /* While MAP is handled explicitly by the FE,
13169 for 'target update', only the identified is passed. */
13170 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
13171 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
13172 && (omp_is_allocatable_or_ptr (var)
13173 && omp_check_optional_argument (var, false)))
13174 var = build_fold_indirect_ref (var);
13175 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
13176 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
13177 || (!omp_is_allocatable_or_ptr (var)
13178 && !omp_check_optional_argument (var, false)))
13179 var = build_fold_addr_expr (var);
13180 gimplify_assign (x, var, &ilist);
13183 s = NULL_TREE;
13184 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13186 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13187 s = TREE_TYPE (ovar);
13188 if (TREE_CODE (s) == REFERENCE_TYPE
13189 || omp_check_optional_argument (ovar, false))
13190 s = TREE_TYPE (s);
13191 s = TYPE_SIZE_UNIT (s);
13193 else
13194 s = OMP_CLAUSE_SIZE (c);
13195 if (s == NULL_TREE)
13196 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13197 s = fold_convert (size_type_node, s);
13198 purpose = size_int (map_idx++);
13199 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13200 if (TREE_CODE (s) != INTEGER_CST)
13201 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13203 unsigned HOST_WIDE_INT tkind, tkind_zero;
13204 switch (OMP_CLAUSE_CODE (c))
13206 case OMP_CLAUSE_MAP:
13207 tkind = OMP_CLAUSE_MAP_KIND (c);
13208 tkind_zero = tkind;
13209 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
13210 switch (tkind)
13212 case GOMP_MAP_ALLOC:
13213 case GOMP_MAP_IF_PRESENT:
13214 case GOMP_MAP_TO:
13215 case GOMP_MAP_FROM:
13216 case GOMP_MAP_TOFROM:
13217 case GOMP_MAP_ALWAYS_TO:
13218 case GOMP_MAP_ALWAYS_FROM:
13219 case GOMP_MAP_ALWAYS_TOFROM:
13220 case GOMP_MAP_ALWAYS_PRESENT_TO:
13221 case GOMP_MAP_ALWAYS_PRESENT_FROM:
13222 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
13223 case GOMP_MAP_RELEASE:
13224 case GOMP_MAP_FORCE_TO:
13225 case GOMP_MAP_FORCE_FROM:
13226 case GOMP_MAP_FORCE_TOFROM:
13227 case GOMP_MAP_FORCE_PRESENT:
13228 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
13229 break;
13230 case GOMP_MAP_DELETE:
13231 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
13232 default:
13233 break;
13235 if (tkind_zero != tkind)
13237 if (integer_zerop (s))
13238 tkind = tkind_zero;
13239 else if (integer_nonzerop (s))
13240 tkind_zero = tkind;
13242 if (tkind_zero == tkind
13243 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c)
13244 && (((tkind & GOMP_MAP_FLAG_SPECIAL_BITS)
13245 & ~GOMP_MAP_IMPLICIT)
13246 == 0))
13248 /* If this is an implicit map, and the GOMP_MAP_IMPLICIT
13249 bits are not interfered by other special bit encodings,
13250 then turn the GOMP_IMPLICIT_BIT flag on for the runtime
13251 to see. */
13252 tkind |= GOMP_MAP_IMPLICIT;
13253 tkind_zero = tkind;
13255 break;
13256 case OMP_CLAUSE_FIRSTPRIVATE:
13257 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13258 tkind = GOMP_MAP_TO;
13259 tkind_zero = tkind;
13260 break;
13261 case OMP_CLAUSE_TO:
13262 tkind
13263 = (OMP_CLAUSE_MOTION_PRESENT (c)
13264 ? GOMP_MAP_ALWAYS_PRESENT_TO : GOMP_MAP_TO);
13265 tkind_zero = tkind;
13266 break;
13267 case OMP_CLAUSE_FROM:
13268 tkind
13269 = (OMP_CLAUSE_MOTION_PRESENT (c)
13270 ? GOMP_MAP_ALWAYS_PRESENT_FROM : GOMP_MAP_FROM);
13271 tkind_zero = tkind;
13272 break;
13273 default:
13274 gcc_unreachable ();
13276 gcc_checking_assert (tkind
13277 < (HOST_WIDE_INT_C (1U) << talign_shift));
13278 gcc_checking_assert (tkind_zero
13279 < (HOST_WIDE_INT_C (1U) << talign_shift));
13280 talign = ceil_log2 (talign);
13281 tkind |= talign << talign_shift;
13282 tkind_zero |= talign << talign_shift;
13283 gcc_checking_assert (tkind
13284 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13285 gcc_checking_assert (tkind_zero
13286 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13287 if (tkind == tkind_zero)
13288 x = build_int_cstu (tkind_type, tkind);
13289 else
13291 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
13292 x = build3 (COND_EXPR, tkind_type,
13293 fold_build2 (EQ_EXPR, boolean_type_node,
13294 unshare_expr (s), size_zero_node),
13295 build_int_cstu (tkind_type, tkind_zero),
13296 build_int_cstu (tkind_type, tkind));
13298 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
13299 if (nc && nc != c)
13300 c = nc;
13301 break;
13303 case OMP_CLAUSE_FIRSTPRIVATE:
13304 omp_has_device_addr_descr:
13305 if (is_gimple_omp_oacc (ctx->stmt))
13306 goto oacc_firstprivate_map;
13307 ovar = OMP_CLAUSE_DECL (c);
13308 if (omp_privatize_by_reference (ovar))
13309 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13310 else
13311 talign = DECL_ALIGN_UNIT (ovar);
13312 var = lookup_decl_in_outer_ctx (ovar, ctx);
13313 x = build_sender_ref (ovar, ctx);
13314 tkind = GOMP_MAP_FIRSTPRIVATE;
13315 type = TREE_TYPE (ovar);
13316 if (omp_privatize_by_reference (ovar))
13317 type = TREE_TYPE (type);
13318 if ((INTEGRAL_TYPE_P (type)
13319 && TYPE_PRECISION (type) <= POINTER_SIZE)
13320 || TREE_CODE (type) == POINTER_TYPE)
13322 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13323 tree t = var;
13324 if (omp_privatize_by_reference (var))
13325 t = build_simple_mem_ref (var);
13326 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13327 suppress_warning (var);
13328 if (TREE_CODE (type) != POINTER_TYPE)
13329 t = fold_convert (pointer_sized_int_node, t);
13330 t = fold_convert (TREE_TYPE (x), t);
13331 gimplify_assign (x, t, &ilist);
13333 else if (omp_privatize_by_reference (var))
13334 gimplify_assign (x, var, &ilist);
13335 else if (is_gimple_reg (var))
13337 tree avar = create_tmp_var (TREE_TYPE (var));
13338 mark_addressable (avar);
13339 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13340 suppress_warning (var);
13341 gimplify_assign (avar, var, &ilist);
13342 avar = build_fold_addr_expr (avar);
13343 gimplify_assign (x, avar, &ilist);
13345 else
13347 var = build_fold_addr_expr (var);
13348 gimplify_assign (x, var, &ilist);
13350 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13351 s = size_int (0);
13352 else if (omp_privatize_by_reference (ovar))
13353 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13354 else
13355 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13356 s = fold_convert (size_type_node, s);
13357 purpose = size_int (map_idx++);
13358 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13359 if (TREE_CODE (s) != INTEGER_CST)
13360 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13362 gcc_checking_assert (tkind
13363 < (HOST_WIDE_INT_C (1U) << talign_shift));
13364 talign = ceil_log2 (talign);
13365 tkind |= talign << talign_shift;
13366 gcc_checking_assert (tkind
13367 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13368 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13369 build_int_cstu (tkind_type, tkind));
13370 /* Fortran array descriptors: firstprivate of data + attach. */
13371 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13372 && lang_hooks.decls.omp_array_data (ovar, true))
13374 tree not_null_lb, null_lb, after_lb;
13375 tree var1, var2, size1, size2;
13376 tree present = omp_check_optional_argument (ovar, true);
13377 if (present)
13379 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13380 not_null_lb = create_artificial_label (clause_loc);
13381 null_lb = create_artificial_label (clause_loc);
13382 after_lb = create_artificial_label (clause_loc);
13383 gimple_seq seq = NULL;
13384 present = force_gimple_operand (present, &seq, true,
13385 NULL_TREE);
13386 gimple_seq_add_seq (&ilist, seq);
13387 gimple_seq_add_stmt (&ilist,
13388 gimple_build_cond_from_tree (present,
13389 not_null_lb, null_lb));
13390 gimple_seq_add_stmt (&ilist,
13391 gimple_build_label (not_null_lb));
13393 var1 = lang_hooks.decls.omp_array_data (var, false);
13394 size1 = lang_hooks.decls.omp_array_size (var, &ilist);
13395 var2 = build_fold_addr_expr (x);
13396 if (!POINTER_TYPE_P (TREE_TYPE (var)))
13397 var = build_fold_addr_expr (var);
13398 size2 = fold_build2 (POINTER_DIFF_EXPR, ssizetype,
13399 build_fold_addr_expr (var1), var);
13400 size2 = fold_convert (sizetype, size2);
13401 if (present)
13403 tree tmp = create_tmp_var (TREE_TYPE (var1));
13404 gimplify_assign (tmp, var1, &ilist);
13405 var1 = tmp;
13406 tmp = create_tmp_var (TREE_TYPE (var2));
13407 gimplify_assign (tmp, var2, &ilist);
13408 var2 = tmp;
13409 tmp = create_tmp_var (TREE_TYPE (size1));
13410 gimplify_assign (tmp, size1, &ilist);
13411 size1 = tmp;
13412 tmp = create_tmp_var (TREE_TYPE (size2));
13413 gimplify_assign (tmp, size2, &ilist);
13414 size2 = tmp;
13415 gimple_seq_add_stmt (&ilist, gimple_build_goto (after_lb));
13416 gimple_seq_add_stmt (&ilist, gimple_build_label (null_lb));
13417 gimplify_assign (var1, null_pointer_node, &ilist);
13418 gimplify_assign (var2, null_pointer_node, &ilist);
13419 gimplify_assign (size1, size_zero_node, &ilist);
13420 gimplify_assign (size2, size_zero_node, &ilist);
13421 gimple_seq_add_stmt (&ilist, gimple_build_label (after_lb));
13423 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13424 gimplify_assign (x, var1, &ilist);
13425 tkind = GOMP_MAP_FIRSTPRIVATE;
13426 talign = DECL_ALIGN_UNIT (ovar);
13427 talign = ceil_log2 (talign);
13428 tkind |= talign << talign_shift;
13429 gcc_checking_assert (tkind
13430 <= tree_to_uhwi (
13431 TYPE_MAX_VALUE (tkind_type)));
13432 purpose = size_int (map_idx++);
13433 CONSTRUCTOR_APPEND_ELT (vsize, purpose, size1);
13434 if (TREE_CODE (size1) != INTEGER_CST)
13435 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13436 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13437 build_int_cstu (tkind_type, tkind));
13438 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13439 gimplify_assign (x, var2, &ilist);
13440 tkind = GOMP_MAP_ATTACH;
13441 purpose = size_int (map_idx++);
13442 CONSTRUCTOR_APPEND_ELT (vsize, purpose, size2);
13443 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13444 build_int_cstu (tkind_type, tkind));
13446 break;
13448 case OMP_CLAUSE_USE_DEVICE_PTR:
13449 case OMP_CLAUSE_USE_DEVICE_ADDR:
13450 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13451 case OMP_CLAUSE_IS_DEVICE_PTR:
13452 ovar = OMP_CLAUSE_DECL (c);
13453 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13455 if (lang_hooks.decls.omp_array_data (ovar, true))
13456 goto omp_has_device_addr_descr;
13457 while (TREE_CODE (ovar) == INDIRECT_REF
13458 || TREE_CODE (ovar) == ARRAY_REF)
13459 ovar = TREE_OPERAND (ovar, 0);
13461 var = lookup_decl_in_outer_ctx (ovar, ctx);
13463 if (lang_hooks.decls.omp_array_data (ovar, true))
13465 tkind = ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13466 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13467 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13468 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13470 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13471 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13473 tkind = GOMP_MAP_USE_DEVICE_PTR;
13474 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13476 else
13478 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13479 x = build_sender_ref (ovar, ctx);
13482 if (is_gimple_omp_oacc (ctx->stmt))
13484 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13486 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13487 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13490 type = TREE_TYPE (ovar);
13491 if (lang_hooks.decls.omp_array_data (ovar, true))
13492 var = lang_hooks.decls.omp_array_data (var, false);
13493 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13494 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13495 && !omp_privatize_by_reference (ovar)
13496 && !omp_is_allocatable_or_ptr (ovar))
13497 || TREE_CODE (type) == ARRAY_TYPE)
13498 var = build_fold_addr_expr (var);
13499 else
13501 if (omp_privatize_by_reference (ovar)
13502 || omp_check_optional_argument (ovar, false)
13503 || omp_is_allocatable_or_ptr (ovar))
13505 type = TREE_TYPE (type);
13506 if (POINTER_TYPE_P (type)
13507 && TREE_CODE (type) != ARRAY_TYPE
13508 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13509 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13510 && !omp_is_allocatable_or_ptr (ovar))
13511 || (omp_privatize_by_reference (ovar)
13512 && omp_is_allocatable_or_ptr (ovar))))
13513 var = build_simple_mem_ref (var);
13514 var = fold_convert (TREE_TYPE (x), var);
13517 tree present;
13518 present = omp_check_optional_argument (ovar, true);
13519 if (present)
13521 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13522 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13523 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13524 tree new_x = unshare_expr (x);
13525 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13526 fb_rvalue);
13527 gcond *cond = gimple_build_cond_from_tree (present,
13528 notnull_label,
13529 null_label);
13530 gimple_seq_add_stmt (&ilist, cond);
13531 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
13532 gimplify_assign (new_x, null_pointer_node, &ilist);
13533 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
13534 gimple_seq_add_stmt (&ilist,
13535 gimple_build_label (notnull_label));
13536 gimplify_assign (x, var, &ilist);
13537 gimple_seq_add_stmt (&ilist,
13538 gimple_build_label (opt_arg_label));
13540 else
13541 gimplify_assign (x, var, &ilist);
13542 s = size_int (0);
13543 purpose = size_int (map_idx++);
13544 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13545 gcc_checking_assert (tkind
13546 < (HOST_WIDE_INT_C (1U) << talign_shift));
13547 gcc_checking_assert (tkind
13548 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13549 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13550 build_int_cstu (tkind_type, tkind));
13551 break;
13554 gcc_assert (map_idx == map_cnt);
13556 DECL_INITIAL (TREE_VEC_ELT (t, 1))
13557 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13558 DECL_INITIAL (TREE_VEC_ELT (t, 2))
13559 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
13560 for (int i = 1; i <= 2; i++)
13561 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
13563 gimple_seq initlist = NULL;
13564 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
13565 TREE_VEC_ELT (t, i)),
13566 &initlist, true, NULL_TREE);
13567 gimple_seq_add_seq (&ilist, initlist);
13569 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
13570 gimple_seq_add_stmt (&olist,
13571 gimple_build_assign (TREE_VEC_ELT (t, i),
13572 clobber));
13574 else if (omp_maybe_offloaded_ctx (ctx->outer))
13576 tree id = get_identifier ("omp declare target");
13577 tree decl = TREE_VEC_ELT (t, i);
13578 DECL_ATTRIBUTES (decl)
13579 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13580 varpool_node *node = varpool_node::get (decl);
13581 if (node)
13583 node->offloadable = 1;
13584 if (ENABLE_OFFLOADING)
13586 g->have_offload = true;
13587 vec_safe_push (offload_vars, t);
13592 tree clobber = build_clobber (ctx->record_type);
13593 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13594 clobber));
13597 /* Once all the expansions are done, sequence all the different
13598 fragments inside gimple_omp_body. */
13600 new_body = NULL;
13602 if (offloaded
13603 && ctx->record_type)
13605 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13606 /* fixup_child_record_type might have changed receiver_decl's type. */
13607 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13608 gimple_seq_add_stmt (&new_body,
13609 gimple_build_assign (ctx->receiver_decl, t));
13611 gimple_seq_add_seq (&new_body, fplist);
13613 if (offloaded || data_region)
13615 tree prev = NULL_TREE;
13616 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13617 switch (OMP_CLAUSE_CODE (c))
13619 tree var, x;
13620 default:
13621 break;
13622 case OMP_CLAUSE_FIRSTPRIVATE:
13623 omp_firstprivatize_data_region:
13624 if (is_gimple_omp_oacc (ctx->stmt))
13625 break;
13626 var = OMP_CLAUSE_DECL (c);
13627 if (omp_privatize_by_reference (var)
13628 || is_gimple_reg_type (TREE_TYPE (var)))
13630 tree new_var = lookup_decl (var, ctx);
13631 tree type;
13632 type = TREE_TYPE (var);
13633 if (omp_privatize_by_reference (var))
13634 type = TREE_TYPE (type);
13635 if ((INTEGRAL_TYPE_P (type)
13636 && TYPE_PRECISION (type) <= POINTER_SIZE)
13637 || TREE_CODE (type) == POINTER_TYPE)
13639 x = build_receiver_ref (var, false, ctx);
13640 if (TREE_CODE (type) != POINTER_TYPE)
13641 x = fold_convert (pointer_sized_int_node, x);
13642 x = fold_convert (type, x);
13643 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13644 fb_rvalue);
13645 if (omp_privatize_by_reference (var))
13647 tree v = create_tmp_var_raw (type, get_name (var));
13648 gimple_add_tmp_var (v);
13649 TREE_ADDRESSABLE (v) = 1;
13650 gimple_seq_add_stmt (&new_body,
13651 gimple_build_assign (v, x));
13652 x = build_fold_addr_expr (v);
13654 gimple_seq_add_stmt (&new_body,
13655 gimple_build_assign (new_var, x));
13657 else
13659 bool by_ref = !omp_privatize_by_reference (var);
13660 x = build_receiver_ref (var, by_ref, ctx);
13661 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13662 fb_rvalue);
13663 gimple_seq_add_stmt (&new_body,
13664 gimple_build_assign (new_var, x));
13667 else if (is_variable_sized (var))
13669 tree pvar = DECL_VALUE_EXPR (var);
13670 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13671 pvar = TREE_OPERAND (pvar, 0);
13672 gcc_assert (DECL_P (pvar));
13673 tree new_var = lookup_decl (pvar, ctx);
13674 x = build_receiver_ref (var, false, ctx);
13675 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13676 gimple_seq_add_stmt (&new_body,
13677 gimple_build_assign (new_var, x));
13679 break;
13680 case OMP_CLAUSE_PRIVATE:
13681 if (is_gimple_omp_oacc (ctx->stmt))
13682 break;
13683 var = OMP_CLAUSE_DECL (c);
13684 if (omp_privatize_by_reference (var))
13686 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13687 tree new_var = lookup_decl (var, ctx);
13688 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13689 if (TREE_CONSTANT (x))
13691 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13692 get_name (var));
13693 gimple_add_tmp_var (x);
13694 TREE_ADDRESSABLE (x) = 1;
13695 x = build_fold_addr_expr_loc (clause_loc, x);
13697 else
13698 break;
13700 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13701 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13702 gimple_seq_add_stmt (&new_body,
13703 gimple_build_assign (new_var, x));
13705 break;
13706 case OMP_CLAUSE_USE_DEVICE_PTR:
13707 case OMP_CLAUSE_USE_DEVICE_ADDR:
13708 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13709 case OMP_CLAUSE_IS_DEVICE_PTR:
13710 tree new_var;
13711 gimple_seq assign_body;
13712 bool is_array_data;
13713 bool do_optional_check;
13714 assign_body = NULL;
13715 do_optional_check = false;
13716 var = OMP_CLAUSE_DECL (c);
13717 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13718 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR && is_array_data)
13719 goto omp_firstprivatize_data_region;
13721 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13722 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13723 x = build_sender_ref (is_array_data
13724 ? (splay_tree_key) &DECL_NAME (var)
13725 : (splay_tree_key) &DECL_UID (var), ctx);
13726 else
13728 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13730 while (TREE_CODE (var) == INDIRECT_REF
13731 || TREE_CODE (var) == ARRAY_REF)
13732 var = TREE_OPERAND (var, 0);
13734 x = build_receiver_ref (var, false, ctx);
13737 if (is_array_data)
13739 bool is_ref = omp_privatize_by_reference (var);
13740 do_optional_check = true;
13741 /* First, we copy the descriptor data from the host; then
13742 we update its data to point to the target address. */
13743 new_var = lookup_decl (var, ctx);
13744 new_var = DECL_VALUE_EXPR (new_var);
13745 tree v = new_var;
13746 tree v2 = var;
13747 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR
13748 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR)
13749 v2 = maybe_lookup_decl_in_outer_ctx (var, ctx);
13751 if (is_ref)
13753 v2 = build_fold_indirect_ref (v2);
13754 v = create_tmp_var_raw (TREE_TYPE (v2), get_name (var));
13755 gimple_add_tmp_var (v);
13756 TREE_ADDRESSABLE (v) = 1;
13757 gimplify_assign (v, v2, &assign_body);
13758 tree rhs = build_fold_addr_expr (v);
13759 gimple_seq_add_stmt (&assign_body,
13760 gimple_build_assign (new_var, rhs));
13762 else
13763 gimplify_assign (new_var, v2, &assign_body);
13765 v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13766 gcc_assert (v2);
13767 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13768 gimple_seq_add_stmt (&assign_body,
13769 gimple_build_assign (v2, x));
13771 else if (is_variable_sized (var))
13773 tree pvar = DECL_VALUE_EXPR (var);
13774 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13775 pvar = TREE_OPERAND (pvar, 0);
13776 gcc_assert (DECL_P (pvar));
13777 new_var = lookup_decl (pvar, ctx);
13778 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13779 gimple_seq_add_stmt (&assign_body,
13780 gimple_build_assign (new_var, x));
13782 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13783 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13784 && !omp_privatize_by_reference (var)
13785 && !omp_is_allocatable_or_ptr (var))
13786 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13788 new_var = lookup_decl (var, ctx);
13789 new_var = DECL_VALUE_EXPR (new_var);
13790 gcc_assert (TREE_CODE (new_var) == MEM_REF);
13791 new_var = TREE_OPERAND (new_var, 0);
13792 gcc_assert (DECL_P (new_var));
13793 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13794 gimple_seq_add_stmt (&assign_body,
13795 gimple_build_assign (new_var, x));
13797 else
13799 tree type = TREE_TYPE (var);
13800 new_var = lookup_decl (var, ctx);
13801 if (omp_privatize_by_reference (var))
13803 type = TREE_TYPE (type);
13804 if (POINTER_TYPE_P (type)
13805 && TREE_CODE (type) != ARRAY_TYPE
13806 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13807 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13808 || (omp_privatize_by_reference (var)
13809 && omp_is_allocatable_or_ptr (var))))
13811 tree v = create_tmp_var_raw (type, get_name (var));
13812 gimple_add_tmp_var (v);
13813 TREE_ADDRESSABLE (v) = 1;
13814 x = fold_convert (type, x);
13815 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13816 fb_rvalue);
13817 gimple_seq_add_stmt (&assign_body,
13818 gimple_build_assign (v, x));
13819 x = build_fold_addr_expr (v);
13820 do_optional_check = true;
13823 new_var = DECL_VALUE_EXPR (new_var);
13824 x = fold_convert (TREE_TYPE (new_var), x);
13825 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13826 gimple_seq_add_stmt (&assign_body,
13827 gimple_build_assign (new_var, x));
13829 tree present;
13830 present = ((do_optional_check
13831 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13832 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13833 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
13834 : NULL_TREE);
13835 if (present)
13837 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13838 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13839 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13840 glabel *null_glabel = gimple_build_label (null_label);
13841 glabel *notnull_glabel = gimple_build_label (notnull_label);
13842 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
13843 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13844 fb_rvalue);
13845 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13846 fb_rvalue);
13847 gcond *cond = gimple_build_cond_from_tree (present,
13848 notnull_label,
13849 null_label);
13850 gimple_seq_add_stmt (&new_body, cond);
13851 gimple_seq_add_stmt (&new_body, null_glabel);
13852 gimplify_assign (new_var, null_pointer_node, &new_body);
13853 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13854 gimple_seq_add_stmt (&new_body, notnull_glabel);
13855 gimple_seq_add_seq (&new_body, assign_body);
13856 gimple_seq_add_stmt (&new_body,
13857 gimple_build_label (opt_arg_label));
13859 else
13860 gimple_seq_add_seq (&new_body, assign_body);
13861 break;
13863 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13864 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13865 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13866 or references to VLAs. */
13867 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13868 switch (OMP_CLAUSE_CODE (c))
13870 tree var;
13871 default:
13872 break;
13873 case OMP_CLAUSE_MAP:
13874 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13875 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13877 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13878 poly_int64 offset = 0;
13879 gcc_assert (prev);
13880 var = OMP_CLAUSE_DECL (c);
13881 if (DECL_P (var)
13882 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
13883 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
13884 ctx))
13885 && varpool_node::get_create (var)->offloadable)
13886 break;
13887 if (TREE_CODE (var) == INDIRECT_REF
13888 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
13889 var = TREE_OPERAND (var, 0);
13890 if (TREE_CODE (var) == COMPONENT_REF)
13892 var = get_addr_base_and_unit_offset (var, &offset);
13893 gcc_assert (var != NULL_TREE && DECL_P (var));
13895 else if (DECL_SIZE (var)
13896 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
13898 tree var2 = DECL_VALUE_EXPR (var);
13899 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
13900 var2 = TREE_OPERAND (var2, 0);
13901 gcc_assert (DECL_P (var2));
13902 var = var2;
13904 tree new_var = lookup_decl (var, ctx), x;
13905 tree type = TREE_TYPE (new_var);
13906 bool is_ref;
13907 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
13908 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13909 == COMPONENT_REF))
13911 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
13912 is_ref = true;
13913 new_var = build2 (MEM_REF, type,
13914 build_fold_addr_expr (new_var),
13915 build_int_cst (build_pointer_type (type),
13916 offset));
13918 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
13920 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
13921 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
13922 new_var = build2 (MEM_REF, type,
13923 build_fold_addr_expr (new_var),
13924 build_int_cst (build_pointer_type (type),
13925 offset));
13927 else
13928 is_ref = omp_privatize_by_reference (var);
13929 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13930 is_ref = false;
13931 bool ref_to_array = false;
13932 bool ref_to_ptr = false;
13933 if (is_ref)
13935 type = TREE_TYPE (type);
13936 if (TREE_CODE (type) == ARRAY_TYPE)
13938 type = build_pointer_type (type);
13939 ref_to_array = true;
13942 else if (TREE_CODE (type) == ARRAY_TYPE)
13944 tree decl2 = DECL_VALUE_EXPR (new_var);
13945 gcc_assert (TREE_CODE (decl2) == MEM_REF);
13946 decl2 = TREE_OPERAND (decl2, 0);
13947 gcc_assert (DECL_P (decl2));
13948 new_var = decl2;
13949 type = TREE_TYPE (new_var);
13951 else if (TREE_CODE (type) == REFERENCE_TYPE
13952 && TREE_CODE (TREE_TYPE (type)) == POINTER_TYPE)
13954 type = TREE_TYPE (type);
13955 ref_to_ptr = true;
13957 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
13958 x = fold_convert_loc (clause_loc, type, x);
13959 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
13961 tree bias = OMP_CLAUSE_SIZE (c);
13962 if (DECL_P (bias))
13963 bias = lookup_decl (bias, ctx);
13964 bias = fold_convert_loc (clause_loc, sizetype, bias);
13965 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
13966 bias);
13967 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
13968 TREE_TYPE (x), x, bias);
13970 if (ref_to_array)
13971 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13972 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13973 if ((is_ref && !ref_to_array)
13974 || ref_to_ptr)
13976 tree t = create_tmp_var_raw (type, get_name (var));
13977 gimple_add_tmp_var (t);
13978 TREE_ADDRESSABLE (t) = 1;
13979 gimple_seq_add_stmt (&new_body,
13980 gimple_build_assign (t, x));
13981 x = build_fold_addr_expr_loc (clause_loc, t);
13983 gimple_seq_add_stmt (&new_body,
13984 gimple_build_assign (new_var, x));
13985 prev = NULL_TREE;
13987 else if (OMP_CLAUSE_CHAIN (c)
13988 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
13989 == OMP_CLAUSE_MAP
13990 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13991 == GOMP_MAP_FIRSTPRIVATE_POINTER
13992 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13993 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13994 prev = c;
13995 break;
13996 case OMP_CLAUSE_PRIVATE:
13997 var = OMP_CLAUSE_DECL (c);
13998 if (is_variable_sized (var))
14000 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14001 tree new_var = lookup_decl (var, ctx);
14002 tree pvar = DECL_VALUE_EXPR (var);
14003 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
14004 pvar = TREE_OPERAND (pvar, 0);
14005 gcc_assert (DECL_P (pvar));
14006 tree new_pvar = lookup_decl (pvar, ctx);
14007 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
14008 tree al = size_int (DECL_ALIGN (var));
14009 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
14010 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
14011 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
14012 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14013 gimple_seq_add_stmt (&new_body,
14014 gimple_build_assign (new_pvar, x));
14016 else if (omp_privatize_by_reference (var)
14017 && !is_gimple_omp_oacc (ctx->stmt))
14019 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14020 tree new_var = lookup_decl (var, ctx);
14021 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
14022 if (TREE_CONSTANT (x))
14023 break;
14024 else
14026 tree atmp
14027 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
14028 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
14029 tree al = size_int (TYPE_ALIGN (rtype));
14030 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
14033 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
14034 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14035 gimple_seq_add_stmt (&new_body,
14036 gimple_build_assign (new_var, x));
14038 break;
14041 gimple_seq fork_seq = NULL;
14042 gimple_seq join_seq = NULL;
14044 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
14046 /* If there are reductions on the offloaded region itself, treat
14047 them as a dummy GANG loop. */
14048 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
14050 gcall *private_marker = lower_oacc_private_marker (ctx);
14052 if (private_marker)
14053 gimple_call_set_arg (private_marker, 2, level);
14055 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
14056 false, NULL, private_marker, NULL, &fork_seq,
14057 &join_seq, ctx);
14060 gimple_seq_add_seq (&new_body, fork_seq);
14061 gimple_seq_add_seq (&new_body, tgt_body);
14062 gimple_seq_add_seq (&new_body, join_seq);
14064 if (offloaded)
14066 new_body = maybe_catch_exception (new_body);
14067 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
14069 gimple_omp_set_body (stmt, new_body);
14072 bind = gimple_build_bind (NULL, NULL,
14073 tgt_bind ? gimple_bind_block (tgt_bind)
14074 : NULL_TREE);
14075 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
14076 gimple_bind_add_seq (bind, ilist);
14077 gimple_bind_add_stmt (bind, stmt);
14078 gimple_bind_add_seq (bind, olist);
14080 pop_gimplify_context (NULL);
14082 if (dep_bind)
14084 gimple_bind_add_seq (dep_bind, dep_ilist);
14085 gimple_bind_add_stmt (dep_bind, bind);
14086 gimple_bind_add_seq (dep_bind, dep_olist);
14087 pop_gimplify_context (dep_bind);
14091 /* Expand code for an OpenMP teams directive. */
14093 static void
14094 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14096 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
14097 push_gimplify_context ();
14099 tree block = make_node (BLOCK);
14100 gbind *bind = gimple_build_bind (NULL, NULL, block);
14101 gsi_replace (gsi_p, bind, true);
14102 gimple_seq bind_body = NULL;
14103 gimple_seq dlist = NULL;
14104 gimple_seq olist = NULL;
14106 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
14107 OMP_CLAUSE_NUM_TEAMS);
14108 tree num_teams_lower = NULL_TREE;
14109 if (num_teams == NULL_TREE)
14110 num_teams = build_int_cst (unsigned_type_node, 0);
14111 else
14113 num_teams_lower = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams);
14114 if (num_teams_lower)
14116 num_teams_lower = fold_convert (unsigned_type_node, num_teams_lower);
14117 gimplify_expr (&num_teams_lower, &bind_body, NULL, is_gimple_val,
14118 fb_rvalue);
14120 num_teams = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams);
14121 num_teams = fold_convert (unsigned_type_node, num_teams);
14122 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
14124 if (num_teams_lower == NULL_TREE)
14125 num_teams_lower = num_teams;
14126 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
14127 OMP_CLAUSE_THREAD_LIMIT);
14128 if (thread_limit == NULL_TREE)
14129 thread_limit = build_int_cst (unsigned_type_node, 0);
14130 else
14132 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
14133 thread_limit = fold_convert (unsigned_type_node, thread_limit);
14134 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
14135 fb_rvalue);
14137 location_t loc = gimple_location (teams_stmt);
14138 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4);
14139 tree rettype = TREE_TYPE (TREE_TYPE (decl));
14140 tree first = create_tmp_var (rettype);
14141 gimple_seq_add_stmt (&bind_body,
14142 gimple_build_assign (first, build_one_cst (rettype)));
14143 tree llabel = create_artificial_label (loc);
14144 gimple_seq_add_stmt (&bind_body, gimple_build_label (llabel));
14145 gimple *call
14146 = gimple_build_call (decl, 4, num_teams_lower, num_teams, thread_limit,
14147 first);
14148 gimple_set_location (call, loc);
14149 tree temp = create_tmp_var (rettype);
14150 gimple_call_set_lhs (call, temp);
14151 gimple_seq_add_stmt (&bind_body, call);
14153 tree tlabel = create_artificial_label (loc);
14154 tree flabel = create_artificial_label (loc);
14155 gimple *cond = gimple_build_cond (NE_EXPR, temp, build_zero_cst (rettype),
14156 tlabel, flabel);
14157 gimple_seq_add_stmt (&bind_body, cond);
14158 gimple_seq_add_stmt (&bind_body, gimple_build_label (tlabel));
14159 gimple_seq_add_stmt (&bind_body,
14160 gimple_build_assign (first, build_zero_cst (rettype)));
14162 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
14163 &bind_body, &dlist, ctx, NULL);
14164 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
14165 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
14166 NULL, ctx);
14167 gimple_seq_add_stmt (&bind_body, teams_stmt);
14169 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
14170 gimple_omp_set_body (teams_stmt, NULL);
14171 gimple_seq_add_seq (&bind_body, olist);
14172 gimple_seq_add_seq (&bind_body, dlist);
14173 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
14174 gimple_seq_add_stmt (&bind_body, gimple_build_goto (llabel));
14175 gimple_seq_add_stmt (&bind_body, gimple_build_label (flabel));
14176 gimple_bind_set_body (bind, bind_body);
14178 pop_gimplify_context (bind);
14180 gimple_bind_append_vars (bind, ctx->block_vars);
14181 BLOCK_VARS (block) = ctx->block_vars;
14182 if (BLOCK_VARS (block))
14183 TREE_USED (block) = 1;
14186 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
14187 regimplified. If DATA is non-NULL, lower_omp_1 is outside
14188 of OMP context, but with make_addressable_vars set. */
14190 static tree
14191 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
14192 void *data)
14194 tree t = *tp;
14196 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
14197 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
14198 && data == NULL
14199 && DECL_HAS_VALUE_EXPR_P (t))
14200 return t;
14202 if (make_addressable_vars
14203 && DECL_P (t)
14204 && bitmap_bit_p (make_addressable_vars, DECL_UID (t)))
14205 return t;
14207 /* If a global variable has been privatized, TREE_CONSTANT on
14208 ADDR_EXPR might be wrong. */
14209 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
14210 recompute_tree_invariant_for_addr_expr (t);
14212 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
14213 return NULL_TREE;
14216 /* Data to be communicated between lower_omp_regimplify_operands and
14217 lower_omp_regimplify_operands_p. */
14219 struct lower_omp_regimplify_operands_data
14221 omp_context *ctx;
14222 vec<tree> *decls;
14225 /* Helper function for lower_omp_regimplify_operands. Find
14226 omp_member_access_dummy_var vars and adjust temporarily their
14227 DECL_VALUE_EXPRs if needed. */
14229 static tree
14230 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
14231 void *data)
14233 tree t = omp_member_access_dummy_var (*tp);
14234 if (t)
14236 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
14237 lower_omp_regimplify_operands_data *ldata
14238 = (lower_omp_regimplify_operands_data *) wi->info;
14239 tree o = maybe_lookup_decl (t, ldata->ctx);
14240 if (o != t)
14242 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
14243 ldata->decls->safe_push (*tp);
14244 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
14245 SET_DECL_VALUE_EXPR (*tp, v);
14248 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
14249 return NULL_TREE;
14252 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
14253 of omp_member_access_dummy_var vars during regimplification. */
14255 static void
14256 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
14257 gimple_stmt_iterator *gsi_p)
14259 auto_vec<tree, 10> decls;
14260 if (ctx)
14262 struct walk_stmt_info wi;
14263 memset (&wi, '\0', sizeof (wi));
14264 struct lower_omp_regimplify_operands_data data;
14265 data.ctx = ctx;
14266 data.decls = &decls;
14267 wi.info = &data;
14268 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
14270 gimple_regimplify_operands (stmt, gsi_p);
14271 while (!decls.is_empty ())
14273 tree t = decls.pop ();
14274 tree v = decls.pop ();
14275 SET_DECL_VALUE_EXPR (t, v);
14279 static void
14280 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14282 gimple *stmt = gsi_stmt (*gsi_p);
14283 struct walk_stmt_info wi;
14284 gcall *call_stmt;
14286 if (gimple_has_location (stmt))
14287 input_location = gimple_location (stmt);
14289 if (make_addressable_vars)
14290 memset (&wi, '\0', sizeof (wi));
14292 /* If we have issued syntax errors, avoid doing any heavy lifting.
14293 Just replace the OMP directives with a NOP to avoid
14294 confusing RTL expansion. */
14295 if (seen_error () && is_gimple_omp (stmt))
14297 gsi_replace (gsi_p, gimple_build_nop (), true);
14298 return;
14301 switch (gimple_code (stmt))
14303 case GIMPLE_COND:
14305 gcond *cond_stmt = as_a <gcond *> (stmt);
14306 if ((ctx || make_addressable_vars)
14307 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
14308 lower_omp_regimplify_p,
14309 ctx ? NULL : &wi, NULL)
14310 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
14311 lower_omp_regimplify_p,
14312 ctx ? NULL : &wi, NULL)))
14313 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
14315 break;
14316 case GIMPLE_CATCH:
14317 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
14318 break;
14319 case GIMPLE_EH_FILTER:
14320 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
14321 break;
14322 case GIMPLE_TRY:
14323 lower_omp (gimple_try_eval_ptr (stmt), ctx);
14324 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
14325 break;
14326 case GIMPLE_ASSUME:
14327 lower_omp (gimple_assume_body_ptr (stmt), ctx);
14328 break;
14329 case GIMPLE_TRANSACTION:
14330 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
14331 ctx);
14332 break;
14333 case GIMPLE_BIND:
14334 if (ctx && is_gimple_omp_oacc (ctx->stmt))
14336 tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
14337 oacc_privatization_scan_decl_chain (ctx, vars);
14339 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
14340 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
14341 break;
14342 case GIMPLE_OMP_PARALLEL:
14343 case GIMPLE_OMP_TASK:
14344 ctx = maybe_lookup_ctx (stmt);
14345 gcc_assert (ctx);
14346 if (ctx->cancellable)
14347 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14348 lower_omp_taskreg (gsi_p, ctx);
14349 break;
14350 case GIMPLE_OMP_FOR:
14351 ctx = maybe_lookup_ctx (stmt);
14352 gcc_assert (ctx);
14353 if (ctx->cancellable)
14354 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14355 lower_omp_for (gsi_p, ctx);
14356 break;
14357 case GIMPLE_OMP_SECTIONS:
14358 ctx = maybe_lookup_ctx (stmt);
14359 gcc_assert (ctx);
14360 if (ctx->cancellable)
14361 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14362 lower_omp_sections (gsi_p, ctx);
14363 break;
14364 case GIMPLE_OMP_SCOPE:
14365 ctx = maybe_lookup_ctx (stmt);
14366 gcc_assert (ctx);
14367 lower_omp_scope (gsi_p, ctx);
14368 break;
14369 case GIMPLE_OMP_SINGLE:
14370 ctx = maybe_lookup_ctx (stmt);
14371 gcc_assert (ctx);
14372 lower_omp_single (gsi_p, ctx);
14373 break;
14374 case GIMPLE_OMP_STRUCTURED_BLOCK:
14375 /* We have already done error checking at this point, so these nodes
14376 can be completely removed and replaced with their body. */
14377 ctx = maybe_lookup_ctx (stmt);
14378 gcc_assert (ctx);
14379 lower_omp (gimple_omp_body_ptr (stmt), ctx);
14380 gsi_replace_with_seq (gsi_p, gimple_omp_body (stmt), true);
14381 break;
14382 case GIMPLE_OMP_MASTER:
14383 case GIMPLE_OMP_MASKED:
14384 ctx = maybe_lookup_ctx (stmt);
14385 gcc_assert (ctx);
14386 lower_omp_master (gsi_p, ctx);
14387 break;
14388 case GIMPLE_OMP_TASKGROUP:
14389 ctx = maybe_lookup_ctx (stmt);
14390 gcc_assert (ctx);
14391 lower_omp_taskgroup (gsi_p, ctx);
14392 break;
14393 case GIMPLE_OMP_ORDERED:
14394 ctx = maybe_lookup_ctx (stmt);
14395 gcc_assert (ctx);
14396 lower_omp_ordered (gsi_p, ctx);
14397 break;
14398 case GIMPLE_OMP_SCAN:
14399 ctx = maybe_lookup_ctx (stmt);
14400 gcc_assert (ctx);
14401 lower_omp_scan (gsi_p, ctx);
14402 break;
14403 case GIMPLE_OMP_CRITICAL:
14404 ctx = maybe_lookup_ctx (stmt);
14405 gcc_assert (ctx);
14406 lower_omp_critical (gsi_p, ctx);
14407 break;
14408 case GIMPLE_OMP_ATOMIC_LOAD:
14409 if ((ctx || make_addressable_vars)
14410 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14411 as_a <gomp_atomic_load *> (stmt)),
14412 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
14413 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14414 break;
14415 case GIMPLE_OMP_TARGET:
14416 ctx = maybe_lookup_ctx (stmt);
14417 gcc_assert (ctx);
14418 lower_omp_target (gsi_p, ctx);
14419 break;
14420 case GIMPLE_OMP_TEAMS:
14421 ctx = maybe_lookup_ctx (stmt);
14422 gcc_assert (ctx);
14423 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
14424 lower_omp_taskreg (gsi_p, ctx);
14425 else
14426 lower_omp_teams (gsi_p, ctx);
14427 break;
14428 case GIMPLE_CALL:
14429 tree fndecl;
14430 call_stmt = as_a <gcall *> (stmt);
14431 fndecl = gimple_call_fndecl (call_stmt);
14432 if (fndecl
14433 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14434 switch (DECL_FUNCTION_CODE (fndecl))
14436 case BUILT_IN_GOMP_BARRIER:
14437 if (ctx == NULL)
14438 break;
14439 /* FALLTHRU */
14440 case BUILT_IN_GOMP_CANCEL:
14441 case BUILT_IN_GOMP_CANCELLATION_POINT:
14442 omp_context *cctx;
14443 cctx = ctx;
14444 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
14445 cctx = cctx->outer;
14446 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
14447 if (!cctx->cancellable)
14449 if (DECL_FUNCTION_CODE (fndecl)
14450 == BUILT_IN_GOMP_CANCELLATION_POINT)
14452 stmt = gimple_build_nop ();
14453 gsi_replace (gsi_p, stmt, false);
14455 break;
14457 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
14459 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
14460 gimple_call_set_fndecl (call_stmt, fndecl);
14461 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
14463 tree lhs;
14464 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
14465 gimple_call_set_lhs (call_stmt, lhs);
14466 tree fallthru_label;
14467 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
14468 gimple *g;
14469 g = gimple_build_label (fallthru_label);
14470 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14471 g = gimple_build_cond (NE_EXPR, lhs,
14472 fold_convert (TREE_TYPE (lhs),
14473 boolean_false_node),
14474 cctx->cancel_label, fallthru_label);
14475 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14476 break;
14477 default:
14478 break;
14480 goto regimplify;
14482 case GIMPLE_ASSIGN:
14483 for (omp_context *up = ctx; up; up = up->outer)
14485 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
14486 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
14487 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
14488 || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
14489 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
14490 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
14491 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
14492 && (gimple_omp_target_kind (up->stmt)
14493 == GF_OMP_TARGET_KIND_DATA)))
14494 continue;
14495 else if (!up->lastprivate_conditional_map)
14496 break;
14497 tree lhs = get_base_address (gimple_assign_lhs (stmt));
14498 if (TREE_CODE (lhs) == MEM_REF
14499 && DECL_P (TREE_OPERAND (lhs, 0))
14500 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
14501 0))) == REFERENCE_TYPE)
14502 lhs = TREE_OPERAND (lhs, 0);
14503 if (DECL_P (lhs))
14504 if (tree *v = up->lastprivate_conditional_map->get (lhs))
14506 tree clauses;
14507 if (up->combined_into_simd_safelen1)
14509 up = up->outer;
14510 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
14511 up = up->outer;
14513 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
14514 clauses = gimple_omp_for_clauses (up->stmt);
14515 else
14516 clauses = gimple_omp_sections_clauses (up->stmt);
14517 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
14518 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
14519 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14520 OMP_CLAUSE__CONDTEMP_);
14521 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
14522 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
14523 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14526 /* FALLTHRU */
14528 default:
14529 regimplify:
14530 if ((ctx || make_addressable_vars)
14531 && walk_gimple_op (stmt, lower_omp_regimplify_p,
14532 ctx ? NULL : &wi))
14534 /* Just remove clobbers, this should happen only if we have
14535 "privatized" local addressable variables in SIMD regions,
14536 the clobber isn't needed in that case and gimplifying address
14537 of the ARRAY_REF into a pointer and creating MEM_REF based
14538 clobber would create worse code than we get with the clobber
14539 dropped. */
14540 if (gimple_clobber_p (stmt))
14542 gsi_replace (gsi_p, gimple_build_nop (), true);
14543 break;
14545 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14547 break;
14551 static void
14552 lower_omp (gimple_seq *body, omp_context *ctx)
14554 location_t saved_location = input_location;
14555 gimple_stmt_iterator gsi;
14556 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14557 lower_omp_1 (&gsi, ctx);
14558 /* During gimplification, we haven't folded statments inside offloading
14559 or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now. */
14560 if (target_nesting_level || taskreg_nesting_level)
14561 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14562 fold_stmt (&gsi);
14563 input_location = saved_location;
14566 /* Main entry point. */
14568 static unsigned int
14569 execute_lower_omp (void)
14571 gimple_seq body;
14572 int i;
14573 omp_context *ctx;
14575 /* This pass always runs, to provide PROP_gimple_lomp.
14576 But often, there is nothing to do. */
14577 if (flag_openacc == 0 && flag_openmp == 0
14578 && flag_openmp_simd == 0)
14579 return 0;
14581 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
14582 delete_omp_context);
14584 body = gimple_body (current_function_decl);
14586 scan_omp (&body, NULL);
14587 gcc_assert (taskreg_nesting_level == 0);
14588 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
14589 finish_taskreg_scan (ctx);
14590 taskreg_contexts.release ();
14592 if (all_contexts->root)
14594 if (make_addressable_vars)
14595 push_gimplify_context ();
14596 lower_omp (&body, NULL);
14597 if (make_addressable_vars)
14598 pop_gimplify_context (NULL);
14601 if (all_contexts)
14603 splay_tree_delete (all_contexts);
14604 all_contexts = NULL;
14606 BITMAP_FREE (make_addressable_vars);
14607 BITMAP_FREE (global_nonaddressable_vars);
14609 /* If current function is a method, remove artificial dummy VAR_DECL created
14610 for non-static data member privatization, they aren't needed for
14611 debuginfo nor anything else, have been already replaced everywhere in the
14612 IL and cause problems with LTO. */
14613 if (DECL_ARGUMENTS (current_function_decl)
14614 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
14615 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
14616 == POINTER_TYPE))
14617 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
14619 for (auto task_stmt : task_cpyfns)
14620 finalize_task_copyfn (task_stmt);
14621 task_cpyfns.release ();
14622 return 0;
14625 namespace {
14627 const pass_data pass_data_lower_omp =
14629 GIMPLE_PASS, /* type */
14630 "omplower", /* name */
14631 OPTGROUP_OMP, /* optinfo_flags */
14632 TV_NONE, /* tv_id */
14633 PROP_gimple_any, /* properties_required */
14634 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
14635 0, /* properties_destroyed */
14636 0, /* todo_flags_start */
14637 0, /* todo_flags_finish */
14640 class pass_lower_omp : public gimple_opt_pass
14642 public:
14643 pass_lower_omp (gcc::context *ctxt)
14644 : gimple_opt_pass (pass_data_lower_omp, ctxt)
14647 /* opt_pass methods: */
14648 unsigned int execute (function *) final override
14650 return execute_lower_omp ();
14653 }; // class pass_lower_omp
14655 } // anon namespace
14657 gimple_opt_pass *
14658 make_pass_lower_omp (gcc::context *ctxt)
14660 return new pass_lower_omp (ctxt);
14663 /* The following is a utility to diagnose structured block violations.
14664 It is not part of the "omplower" pass, as that's invoked too late. It
14665 should be invoked by the respective front ends after gimplification. */
14667 static splay_tree all_labels;
14669 /* Check for mismatched contexts and generate an error if needed. Return
14670 true if an error is detected. */
14672 static bool
14673 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
14674 gimple *branch_ctx, gimple *label_ctx)
14676 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
14677 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
14679 if (label_ctx == branch_ctx)
14680 return false;
14682 const char* kind = NULL;
14684 if (flag_openacc)
14686 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
14687 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
14689 gcc_checking_assert (kind == NULL);
14690 kind = "OpenACC";
14693 if (kind == NULL)
14695 gcc_checking_assert (flag_openmp || flag_openmp_simd);
14696 kind = "OpenMP";
14699 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14700 so we could traverse it and issue a correct "exit" or "enter" error
14701 message upon a structured block violation.
14703 We built the context by building a list with tree_cons'ing, but there is
14704 no easy counterpart in gimple tuples. It seems like far too much work
14705 for issuing exit/enter error messages. If someone really misses the
14706 distinct error message... patches welcome. */
14708 #if 0
14709 /* Try to avoid confusing the user by producing and error message
14710 with correct "exit" or "enter" verbiage. We prefer "exit"
14711 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14712 if (branch_ctx == NULL)
14713 exit_p = false;
14714 else
14716 while (label_ctx)
14718 if (TREE_VALUE (label_ctx) == branch_ctx)
14720 exit_p = false;
14721 break;
14723 label_ctx = TREE_CHAIN (label_ctx);
14727 if (exit_p)
14728 error ("invalid exit from %s structured block", kind);
14729 else
14730 error ("invalid entry to %s structured block", kind);
14731 #endif
14733 /* If it's obvious we have an invalid entry, be specific about the error. */
14734 if (branch_ctx == NULL)
14735 error ("invalid entry to %s structured block", kind);
14736 else
14738 /* Otherwise, be vague and lazy, but efficient. */
14739 error ("invalid branch to/from %s structured block", kind);
14742 gsi_replace (gsi_p, gimple_build_nop (), false);
14743 return true;
14746 /* Pass 1: Create a minimal tree of structured blocks, and record
14747 where each label is found. */
14749 static tree
14750 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14751 struct walk_stmt_info *wi)
14753 gimple *context = (gimple *) wi->info;
14754 gimple *inner_context;
14755 gimple *stmt = gsi_stmt (*gsi_p);
14757 *handled_ops_p = true;
14759 switch (gimple_code (stmt))
14761 WALK_SUBSTMTS;
14763 case GIMPLE_OMP_PARALLEL:
14764 case GIMPLE_OMP_TASK:
14765 case GIMPLE_OMP_SCOPE:
14766 case GIMPLE_OMP_SECTIONS:
14767 case GIMPLE_OMP_SINGLE:
14768 case GIMPLE_OMP_SECTION:
14769 case GIMPLE_OMP_STRUCTURED_BLOCK:
14770 case GIMPLE_OMP_MASTER:
14771 case GIMPLE_OMP_MASKED:
14772 case GIMPLE_OMP_ORDERED:
14773 case GIMPLE_OMP_SCAN:
14774 case GIMPLE_OMP_CRITICAL:
14775 case GIMPLE_OMP_TARGET:
14776 case GIMPLE_OMP_TEAMS:
14777 case GIMPLE_OMP_TASKGROUP:
14778 /* The minimal context here is just the current OMP construct. */
14779 inner_context = stmt;
14780 wi->info = inner_context;
14781 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14782 wi->info = context;
14783 break;
14785 case GIMPLE_OMP_FOR:
14786 inner_context = stmt;
14787 wi->info = inner_context;
14788 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14789 walk them. */
14790 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
14791 diagnose_sb_1, NULL, wi);
14792 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14793 wi->info = context;
14794 break;
14796 case GIMPLE_LABEL:
14797 splay_tree_insert (all_labels,
14798 (splay_tree_key) gimple_label_label (
14799 as_a <glabel *> (stmt)),
14800 (splay_tree_value) context);
14801 break;
14803 default:
14804 break;
14807 return NULL_TREE;
14810 /* Pass 2: Check each branch and see if its context differs from that of
14811 the destination label's context. */
14813 static tree
14814 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14815 struct walk_stmt_info *wi)
14817 gimple *context = (gimple *) wi->info;
14818 splay_tree_node n;
14819 gimple *stmt = gsi_stmt (*gsi_p);
14821 *handled_ops_p = true;
14823 switch (gimple_code (stmt))
14825 WALK_SUBSTMTS;
14827 case GIMPLE_OMP_PARALLEL:
14828 case GIMPLE_OMP_TASK:
14829 case GIMPLE_OMP_SCOPE:
14830 case GIMPLE_OMP_SECTIONS:
14831 case GIMPLE_OMP_SINGLE:
14832 case GIMPLE_OMP_SECTION:
14833 case GIMPLE_OMP_STRUCTURED_BLOCK:
14834 case GIMPLE_OMP_MASTER:
14835 case GIMPLE_OMP_MASKED:
14836 case GIMPLE_OMP_ORDERED:
14837 case GIMPLE_OMP_SCAN:
14838 case GIMPLE_OMP_CRITICAL:
14839 case GIMPLE_OMP_TARGET:
14840 case GIMPLE_OMP_TEAMS:
14841 case GIMPLE_OMP_TASKGROUP:
14842 wi->info = stmt;
14843 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14844 wi->info = context;
14845 break;
14847 case GIMPLE_OMP_FOR:
14848 wi->info = stmt;
14849 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14850 walk them. */
14851 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
14852 diagnose_sb_2, NULL, wi);
14853 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14854 wi->info = context;
14855 break;
14857 case GIMPLE_COND:
14859 gcond *cond_stmt = as_a <gcond *> (stmt);
14860 tree lab = gimple_cond_true_label (cond_stmt);
14861 if (lab)
14863 n = splay_tree_lookup (all_labels,
14864 (splay_tree_key) lab);
14865 diagnose_sb_0 (gsi_p, context,
14866 n ? (gimple *) n->value : NULL);
14868 lab = gimple_cond_false_label (cond_stmt);
14869 if (lab)
14871 n = splay_tree_lookup (all_labels,
14872 (splay_tree_key) lab);
14873 diagnose_sb_0 (gsi_p, context,
14874 n ? (gimple *) n->value : NULL);
14877 break;
14879 case GIMPLE_GOTO:
14881 tree lab = gimple_goto_dest (stmt);
14882 if (TREE_CODE (lab) != LABEL_DECL)
14883 break;
14885 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14886 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
14888 break;
14890 case GIMPLE_SWITCH:
14892 gswitch *switch_stmt = as_a <gswitch *> (stmt);
14893 unsigned int i;
14894 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
14896 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
14897 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14898 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
14899 break;
14902 break;
14904 case GIMPLE_RETURN:
14905 diagnose_sb_0 (gsi_p, context, NULL);
14906 break;
14908 default:
14909 break;
14912 return NULL_TREE;
14915 static unsigned int
14916 diagnose_omp_structured_block_errors (void)
14918 struct walk_stmt_info wi;
14919 gimple_seq body = gimple_body (current_function_decl);
14921 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
14923 memset (&wi, 0, sizeof (wi));
14924 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
14926 memset (&wi, 0, sizeof (wi));
14927 wi.want_locations = true;
14928 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
14930 gimple_set_body (current_function_decl, body);
14932 splay_tree_delete (all_labels);
14933 all_labels = NULL;
14935 return 0;
14938 namespace {
14940 const pass_data pass_data_diagnose_omp_blocks =
14942 GIMPLE_PASS, /* type */
14943 "*diagnose_omp_blocks", /* name */
14944 OPTGROUP_OMP, /* optinfo_flags */
14945 TV_NONE, /* tv_id */
14946 PROP_gimple_any, /* properties_required */
14947 0, /* properties_provided */
14948 0, /* properties_destroyed */
14949 0, /* todo_flags_start */
14950 0, /* todo_flags_finish */
14953 class pass_diagnose_omp_blocks : public gimple_opt_pass
14955 public:
14956 pass_diagnose_omp_blocks (gcc::context *ctxt)
14957 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
14960 /* opt_pass methods: */
14961 bool gate (function *) final override
14963 return flag_openacc || flag_openmp || flag_openmp_simd;
14965 unsigned int execute (function *) final override
14967 return diagnose_omp_structured_block_errors ();
14970 }; // class pass_diagnose_omp_blocks
14972 } // anon namespace
14974 gimple_opt_pass *
14975 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
14977 return new pass_diagnose_omp_blocks (ctxt);
14981 #include "gt-omp-low.h"