MATCH: Improve `A CMP 0 ? A : -A` set of patterns to use bitwise_equal_p.
[official-gcc.git] / gcc / omp-low.cc
blob91ef74f1f6a44bc689caaac4809409b98d394762
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2023 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
42 #include "gimplify.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "omp-offload.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
78 struct omp_context
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.cc (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
84 copy_body_data cb;
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context *outer;
88 gimple *stmt;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
92 splay_tree field_map;
93 tree record_type;
94 tree sender_decl;
95 tree receiver_decl;
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map;
103 tree srecord_type;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
107 tree block_vars;
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
111 tree cancel_label;
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
114 otherwise. */
115 gimple *simt_stmt;
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec<tree> task_reductions;
122 /* A hash map from the reduction clauses to the registered array
123 elts. */
124 hash_map<tree, unsigned> *task_reduction_map;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map<tree, tree> *lastprivate_conditional_map;
130 /* And a hash map from the allocate variables to their corresponding
131 allocators. */
132 hash_map<tree, tree> *allocate_map;
134 /* A tree_list of the reduction clauses in this context. This is
135 only used for checking the consistency of OpenACC reduction
136 clauses in scan_omp_for and is not guaranteed to contain a valid
137 value outside of this function. */
138 tree local_reduction_clauses;
140 /* A tree_list of the reduction clauses in outer contexts. This is
141 only used for checking the consistency of OpenACC reduction
142 clauses in scan_omp_for and is not guaranteed to contain a valid
143 value outside of this function. */
144 tree outer_reduction_clauses;
146 /* Nesting depth of this context. Used to beautify error messages re
147 invalid gotos. The outermost ctx is depth 1, with depth 0 being
148 reserved for the main body of the function. */
149 int depth;
151 /* True if this parallel directive is nested within another. */
152 bool is_nested;
154 /* True if this construct can be cancelled. */
155 bool cancellable;
157 /* True if lower_omp_1 should look up lastprivate conditional in parent
158 context. */
159 bool combined_into_simd_safelen1;
161 /* True if there is nested scan context with inclusive clause. */
162 bool scan_inclusive;
164 /* True if there is nested scan context with exclusive clause. */
165 bool scan_exclusive;
167 /* True in the second simd loop of for simd with inscan reductions. */
168 bool for_simd_scan_phase;
170 /* True if there is order(concurrent) clause on the construct. */
171 bool order_concurrent;
173 /* True if there is bind clause on the construct (i.e. a loop construct). */
174 bool loop_p;
176 /* Only used for omp target contexts. True if a teams construct is
177 strictly nested in it. */
178 bool teams_nested_p;
180 /* Only used for omp target contexts. True if an OpenMP construct other
181 than teams is strictly nested in it. */
182 bool nonteams_nested_p;
184 /* Candidates for adjusting OpenACC privatization level. */
185 vec<tree> oacc_privatization_candidates;
188 static splay_tree all_contexts;
189 static int taskreg_nesting_level;
190 static int target_nesting_level;
191 static bitmap make_addressable_vars;
192 static bitmap global_nonaddressable_vars;
193 static vec<omp_context *> taskreg_contexts;
194 static vec<gomp_task *> task_cpyfns;
196 static void scan_omp (gimple_seq *, omp_context *);
197 static tree scan_omp_1_op (tree *, int *, void *);
198 static bool omp_maybe_offloaded_ctx (omp_context *ctx);
200 #define WALK_SUBSTMTS \
201 case GIMPLE_BIND: \
202 case GIMPLE_TRY: \
203 case GIMPLE_CATCH: \
204 case GIMPLE_EH_FILTER: \
205 case GIMPLE_ASSUME: \
206 case GIMPLE_TRANSACTION: \
207 /* The sub-statements for these should be walked. */ \
208 *handled_ops_p = false; \
209 break;
211 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
212 (This doesn't include OpenACC 'kernels' decomposed parts.) */
214 static bool
215 is_oacc_parallel_or_serial (omp_context *ctx)
217 enum gimple_code outer_type = gimple_code (ctx->stmt);
218 return ((outer_type == GIMPLE_OMP_TARGET)
219 && ((gimple_omp_target_kind (ctx->stmt)
220 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
221 || (gimple_omp_target_kind (ctx->stmt)
222 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
225 /* Return whether CTX represents an OpenACC 'kernels' construct.
226 (This doesn't include OpenACC 'kernels' decomposed parts.) */
228 static bool
229 is_oacc_kernels (omp_context *ctx)
231 enum gimple_code outer_type = gimple_code (ctx->stmt);
232 return ((outer_type == GIMPLE_OMP_TARGET)
233 && (gimple_omp_target_kind (ctx->stmt)
234 == GF_OMP_TARGET_KIND_OACC_KERNELS));
237 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
239 static bool
240 is_oacc_kernels_decomposed_part (omp_context *ctx)
242 enum gimple_code outer_type = gimple_code (ctx->stmt);
243 return ((outer_type == GIMPLE_OMP_TARGET)
244 && ((gimple_omp_target_kind (ctx->stmt)
245 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
246 || (gimple_omp_target_kind (ctx->stmt)
247 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
248 || (gimple_omp_target_kind (ctx->stmt)
249 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
252 /* Return true if STMT corresponds to an OpenMP target region. */
253 static bool
254 is_omp_target (gimple *stmt)
256 if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
258 int kind = gimple_omp_target_kind (stmt);
259 return (kind == GF_OMP_TARGET_KIND_REGION
260 || kind == GF_OMP_TARGET_KIND_DATA
261 || kind == GF_OMP_TARGET_KIND_ENTER_DATA
262 || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
264 return false;
267 /* If DECL is the artificial dummy VAR_DECL created for non-static
268 data member privatization, return the underlying "this" parameter,
269 otherwise return NULL. */
271 tree
272 omp_member_access_dummy_var (tree decl)
274 if (!VAR_P (decl)
275 || !DECL_ARTIFICIAL (decl)
276 || !DECL_IGNORED_P (decl)
277 || !DECL_HAS_VALUE_EXPR_P (decl)
278 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
279 return NULL_TREE;
281 tree v = DECL_VALUE_EXPR (decl);
282 if (TREE_CODE (v) != COMPONENT_REF)
283 return NULL_TREE;
285 while (1)
286 switch (TREE_CODE (v))
288 case COMPONENT_REF:
289 case MEM_REF:
290 case INDIRECT_REF:
291 CASE_CONVERT:
292 case POINTER_PLUS_EXPR:
293 v = TREE_OPERAND (v, 0);
294 continue;
295 case PARM_DECL:
296 if (DECL_CONTEXT (v) == current_function_decl
297 && DECL_ARTIFICIAL (v)
298 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
299 return v;
300 return NULL_TREE;
301 default:
302 return NULL_TREE;
306 /* Helper for unshare_and_remap, called through walk_tree. */
308 static tree
309 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
311 tree *pair = (tree *) data;
312 if (*tp == pair[0])
314 *tp = unshare_expr (pair[1]);
315 *walk_subtrees = 0;
317 else if (IS_TYPE_OR_DECL_P (*tp))
318 *walk_subtrees = 0;
319 return NULL_TREE;
322 /* Return unshare_expr (X) with all occurrences of FROM
323 replaced with TO. */
325 static tree
326 unshare_and_remap (tree x, tree from, tree to)
328 tree pair[2] = { from, to };
329 x = unshare_expr (x);
330 walk_tree (&x, unshare_and_remap_1, pair, NULL);
331 return x;
334 /* Convenience function for calling scan_omp_1_op on tree operands. */
336 static inline tree
337 scan_omp_op (tree *tp, omp_context *ctx)
339 struct walk_stmt_info wi;
341 memset (&wi, 0, sizeof (wi));
342 wi.info = ctx;
343 wi.want_locations = true;
345 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
348 static void lower_omp (gimple_seq *, omp_context *);
349 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
350 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
352 /* Return true if CTX is for an omp parallel. */
354 static inline bool
355 is_parallel_ctx (omp_context *ctx)
357 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
361 /* Return true if CTX is for an omp task. */
363 static inline bool
364 is_task_ctx (omp_context *ctx)
366 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
370 /* Return true if CTX is for an omp taskloop. */
372 static inline bool
373 is_taskloop_ctx (omp_context *ctx)
375 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
376 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
380 /* Return true if CTX is for a host omp teams. */
382 static inline bool
383 is_host_teams_ctx (omp_context *ctx)
385 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
386 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
389 /* Return true if CTX is for an omp parallel or omp task or host omp teams
390 (the last one is strictly not a task region in OpenMP speak, but we
391 need to treat it similarly). */
393 static inline bool
394 is_taskreg_ctx (omp_context *ctx)
396 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
399 /* Return true if EXPR is variable sized. */
401 static inline bool
402 is_variable_sized (const_tree expr)
404 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
407 /* Lookup variables. The "maybe" form
408 allows for the variable form to not have been entered, otherwise we
409 assert that the variable must have been entered. */
411 static inline tree
412 lookup_decl (tree var, omp_context *ctx)
414 tree *n = ctx->cb.decl_map->get (var);
415 return *n;
418 static inline tree
419 maybe_lookup_decl (const_tree var, omp_context *ctx)
421 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
422 return n ? *n : NULL_TREE;
425 static inline tree
426 lookup_field (tree var, omp_context *ctx)
428 splay_tree_node n;
429 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
430 return (tree) n->value;
433 static inline tree
434 lookup_sfield (splay_tree_key key, omp_context *ctx)
436 splay_tree_node n;
437 n = splay_tree_lookup (ctx->sfield_map
438 ? ctx->sfield_map : ctx->field_map, key);
439 return (tree) n->value;
442 static inline tree
443 lookup_sfield (tree var, omp_context *ctx)
445 return lookup_sfield ((splay_tree_key) var, ctx);
448 static inline tree
449 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
451 splay_tree_node n;
452 n = splay_tree_lookup (ctx->field_map, key);
453 return n ? (tree) n->value : NULL_TREE;
456 static inline tree
457 maybe_lookup_field (tree var, omp_context *ctx)
459 return maybe_lookup_field ((splay_tree_key) var, ctx);
462 /* Return true if DECL should be copied by pointer. SHARED_CTX is
463 the parallel context if DECL is to be shared. */
465 static bool
466 use_pointer_for_field (tree decl, omp_context *shared_ctx)
468 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
469 || TYPE_ATOMIC (TREE_TYPE (decl)))
470 return true;
472 /* We can only use copy-in/copy-out semantics for shared variables
473 when we know the value is not accessible from an outer scope. */
474 if (shared_ctx)
476 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
478 /* ??? Trivially accessible from anywhere. But why would we even
479 be passing an address in this case? Should we simply assert
480 this to be false, or should we have a cleanup pass that removes
481 these from the list of mappings? */
482 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
483 return true;
485 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
486 without analyzing the expression whether or not its location
487 is accessible to anyone else. In the case of nested parallel
488 regions it certainly may be. */
489 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
490 return true;
492 /* Do not use copy-in/copy-out for variables that have their
493 address taken. */
494 if (is_global_var (decl))
496 /* For file scope vars, track whether we've seen them as
497 non-addressable initially and in that case, keep the same
498 answer for the duration of the pass, even when they are made
499 addressable later on e.g. through reduction expansion. Global
500 variables which weren't addressable before the pass will not
501 have their privatized copies address taken. See PR91216. */
502 if (!TREE_ADDRESSABLE (decl))
504 if (!global_nonaddressable_vars)
505 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
506 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
508 else if (!global_nonaddressable_vars
509 || !bitmap_bit_p (global_nonaddressable_vars,
510 DECL_UID (decl)))
511 return true;
513 else if (TREE_ADDRESSABLE (decl))
514 return true;
516 /* lower_send_shared_vars only uses copy-in, but not copy-out
517 for these. */
518 if (TREE_READONLY (decl)
519 || ((TREE_CODE (decl) == RESULT_DECL
520 || TREE_CODE (decl) == PARM_DECL)
521 && DECL_BY_REFERENCE (decl)))
522 return false;
524 /* Disallow copy-in/out in nested parallel if
525 decl is shared in outer parallel, otherwise
526 each thread could store the shared variable
527 in its own copy-in location, making the
528 variable no longer really shared. */
529 if (shared_ctx->is_nested)
531 omp_context *up;
533 for (up = shared_ctx->outer; up; up = up->outer)
534 if ((is_taskreg_ctx (up)
535 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
536 && is_gimple_omp_offloaded (up->stmt)))
537 && maybe_lookup_decl (decl, up))
538 break;
540 if (up)
542 tree c;
544 if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
546 for (c = gimple_omp_target_clauses (up->stmt);
547 c; c = OMP_CLAUSE_CHAIN (c))
548 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
549 && OMP_CLAUSE_DECL (c) == decl)
550 break;
552 else
553 for (c = gimple_omp_taskreg_clauses (up->stmt);
554 c; c = OMP_CLAUSE_CHAIN (c))
555 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
556 && OMP_CLAUSE_DECL (c) == decl)
557 break;
559 if (c)
560 goto maybe_mark_addressable_and_ret;
564 /* For tasks avoid using copy-in/out. As tasks can be
565 deferred or executed in different thread, when GOMP_task
566 returns, the task hasn't necessarily terminated. */
567 if (is_task_ctx (shared_ctx))
569 tree outer;
570 maybe_mark_addressable_and_ret:
571 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
572 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
574 /* Taking address of OUTER in lower_send_shared_vars
575 might need regimplification of everything that uses the
576 variable. */
577 if (!make_addressable_vars)
578 make_addressable_vars = BITMAP_ALLOC (NULL);
579 bitmap_set_bit (make_addressable_vars, DECL_UID (outer));
580 TREE_ADDRESSABLE (outer) = 1;
582 return true;
586 return false;
589 /* Construct a new automatic decl similar to VAR. */
591 static tree
592 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
594 tree copy = copy_var_decl (var, name, type);
596 DECL_CONTEXT (copy) = current_function_decl;
598 if (ctx)
600 DECL_CHAIN (copy) = ctx->block_vars;
601 ctx->block_vars = copy;
603 else
604 record_vars (copy);
606 /* If VAR is listed in make_addressable_vars, it wasn't
607 originally addressable, but was only later made so.
608 We don't need to take address of privatizations
609 from that var. */
610 if (TREE_ADDRESSABLE (var)
611 && ((make_addressable_vars
612 && bitmap_bit_p (make_addressable_vars, DECL_UID (var)))
613 || (global_nonaddressable_vars
614 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
615 TREE_ADDRESSABLE (copy) = 0;
617 return copy;
620 static tree
621 omp_copy_decl_1 (tree var, omp_context *ctx)
623 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
626 /* Build tree nodes to access the field for VAR on the receiver side. */
628 static tree
629 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
631 tree x, field = lookup_field (var, ctx);
633 /* If the receiver record type was remapped in the child function,
634 remap the field into the new record type. */
635 x = maybe_lookup_field (field, ctx);
636 if (x != NULL)
637 field = x;
639 x = build_simple_mem_ref (ctx->receiver_decl);
640 TREE_THIS_NOTRAP (x) = 1;
641 x = omp_build_component_ref (x, field);
642 if (by_ref)
644 x = build_simple_mem_ref (x);
645 TREE_THIS_NOTRAP (x) = 1;
648 return x;
651 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
652 of a parallel, this is a component reference; for workshare constructs
653 this is some variable. */
655 static tree
656 build_outer_var_ref (tree var, omp_context *ctx,
657 enum omp_clause_code code = OMP_CLAUSE_ERROR)
659 tree x;
660 omp_context *outer = ctx->outer;
661 for (; outer; outer = outer->outer)
663 if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
664 continue;
665 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
666 && !maybe_lookup_decl (var, outer))
667 continue;
668 break;
671 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
672 x = var;
673 else if (is_variable_sized (var))
675 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
676 x = build_outer_var_ref (x, ctx, code);
677 x = build_simple_mem_ref (x);
679 else if (is_taskreg_ctx (ctx))
681 bool by_ref = use_pointer_for_field (var, NULL);
682 x = build_receiver_ref (var, by_ref, ctx);
684 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
685 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
686 || ctx->loop_p
687 || code == OMP_CLAUSE_ALLOCATE
688 || (code == OMP_CLAUSE_PRIVATE
689 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
690 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
691 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
693 /* #pragma omp simd isn't a worksharing construct, and can reference
694 even private vars in its linear etc. clauses.
695 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
696 to private vars in all worksharing constructs. */
697 x = NULL_TREE;
698 if (outer && is_taskreg_ctx (outer))
699 x = lookup_decl (var, outer);
700 else if (outer)
701 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
702 if (x == NULL_TREE)
703 x = var;
705 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
707 gcc_assert (outer);
708 splay_tree_node n
709 = splay_tree_lookup (outer->field_map,
710 (splay_tree_key) &DECL_UID (var));
711 if (n == NULL)
713 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
714 x = var;
715 else
716 x = lookup_decl (var, outer);
718 else
720 tree field = (tree) n->value;
721 /* If the receiver record type was remapped in the child function,
722 remap the field into the new record type. */
723 x = maybe_lookup_field (field, outer);
724 if (x != NULL)
725 field = x;
727 x = build_simple_mem_ref (outer->receiver_decl);
728 x = omp_build_component_ref (x, field);
729 if (use_pointer_for_field (var, outer))
730 x = build_simple_mem_ref (x);
733 else if (outer)
734 x = lookup_decl (var, outer);
735 else if (omp_privatize_by_reference (var))
736 /* This can happen with orphaned constructs. If var is reference, it is
737 possible it is shared and as such valid. */
738 x = var;
739 else if (omp_member_access_dummy_var (var))
740 x = var;
741 else
742 gcc_unreachable ();
744 if (x == var)
746 tree t = omp_member_access_dummy_var (var);
747 if (t)
749 x = DECL_VALUE_EXPR (var);
750 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
751 if (o != t)
752 x = unshare_and_remap (x, t, o);
753 else
754 x = unshare_expr (x);
758 if (omp_privatize_by_reference (var))
759 x = build_simple_mem_ref (x);
761 return x;
764 /* Build tree nodes to access the field for VAR on the sender side. */
766 static tree
767 build_sender_ref (splay_tree_key key, omp_context *ctx)
769 tree field = lookup_sfield (key, ctx);
770 return omp_build_component_ref (ctx->sender_decl, field);
773 static tree
774 build_sender_ref (tree var, omp_context *ctx)
776 return build_sender_ref ((splay_tree_key) var, ctx);
779 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
780 BASE_POINTERS_RESTRICT, declare the field with restrict. */
782 static void
783 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
785 tree field, type, sfield = NULL_TREE;
786 splay_tree_key key = (splay_tree_key) var;
788 if ((mask & 16) != 0)
790 key = (splay_tree_key) &DECL_NAME (var);
791 gcc_checking_assert (key != (splay_tree_key) var);
793 if ((mask & 8) != 0)
795 key = (splay_tree_key) &DECL_UID (var);
796 gcc_checking_assert (key != (splay_tree_key) var);
798 gcc_assert ((mask & 1) == 0
799 || !splay_tree_lookup (ctx->field_map, key));
800 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
801 || !splay_tree_lookup (ctx->sfield_map, key));
802 gcc_assert ((mask & 3) == 3
803 || !is_gimple_omp_oacc (ctx->stmt));
805 type = TREE_TYPE (var);
806 if ((mask & 16) != 0)
807 type = lang_hooks.decls.omp_array_data (var, true);
809 /* Prevent redeclaring the var in the split-off function with a restrict
810 pointer type. Note that we only clear type itself, restrict qualifiers in
811 the pointed-to type will be ignored by points-to analysis. */
812 if (POINTER_TYPE_P (type)
813 && TYPE_RESTRICT (type))
814 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
816 if (mask & 4)
818 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
819 type = build_pointer_type (build_pointer_type (type));
821 else if (by_ref)
822 type = build_pointer_type (type);
823 else if ((mask & (32 | 3)) == 1
824 && omp_privatize_by_reference (var))
825 type = TREE_TYPE (type);
827 field = build_decl (DECL_SOURCE_LOCATION (var),
828 FIELD_DECL, DECL_NAME (var), type);
830 /* Remember what variable this field was created for. This does have a
831 side effect of making dwarf2out ignore this member, so for helpful
832 debugging we clear it later in delete_omp_context. */
833 DECL_ABSTRACT_ORIGIN (field) = var;
834 if ((mask & 16) == 0 && type == TREE_TYPE (var))
836 SET_DECL_ALIGN (field, DECL_ALIGN (var));
837 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
838 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
840 else
841 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
843 if ((mask & 3) == 3)
845 insert_field_into_struct (ctx->record_type, field);
846 if (ctx->srecord_type)
848 sfield = build_decl (DECL_SOURCE_LOCATION (var),
849 FIELD_DECL, DECL_NAME (var), type);
850 DECL_ABSTRACT_ORIGIN (sfield) = var;
851 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
852 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
853 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
854 insert_field_into_struct (ctx->srecord_type, sfield);
857 else
859 if (ctx->srecord_type == NULL_TREE)
861 tree t;
863 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
864 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
865 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
867 sfield = build_decl (DECL_SOURCE_LOCATION (t),
868 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
869 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
870 insert_field_into_struct (ctx->srecord_type, sfield);
871 splay_tree_insert (ctx->sfield_map,
872 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
873 (splay_tree_value) sfield);
876 sfield = field;
877 insert_field_into_struct ((mask & 1) ? ctx->record_type
878 : ctx->srecord_type, field);
881 if (mask & 1)
882 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
883 if ((mask & 2) && ctx->sfield_map)
884 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
887 static tree
888 install_var_local (tree var, omp_context *ctx)
890 tree new_var = omp_copy_decl_1 (var, ctx);
891 insert_decl_map (&ctx->cb, var, new_var);
892 return new_var;
895 /* Adjust the replacement for DECL in CTX for the new context. This means
896 copying the DECL_VALUE_EXPR, and fixing up the type. */
898 static void
899 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
901 tree new_decl, size;
903 new_decl = lookup_decl (decl, ctx);
905 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
907 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
908 && DECL_HAS_VALUE_EXPR_P (decl))
910 tree ve = DECL_VALUE_EXPR (decl);
911 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
912 SET_DECL_VALUE_EXPR (new_decl, ve);
913 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
916 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
918 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
919 if (size == error_mark_node)
920 size = TYPE_SIZE (TREE_TYPE (new_decl));
921 DECL_SIZE (new_decl) = size;
923 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
924 if (size == error_mark_node)
925 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
926 DECL_SIZE_UNIT (new_decl) = size;
930 /* The callback for remap_decl. Search all containing contexts for a
931 mapping of the variable; this avoids having to duplicate the splay
932 tree ahead of time. We know a mapping doesn't already exist in the
933 given context. Create new mappings to implement default semantics. */
935 static tree
936 omp_copy_decl (tree var, copy_body_data *cb)
938 omp_context *ctx = (omp_context *) cb;
939 tree new_var;
941 if (TREE_CODE (var) == LABEL_DECL)
943 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
944 return var;
945 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
946 DECL_CONTEXT (new_var) = current_function_decl;
947 insert_decl_map (&ctx->cb, var, new_var);
948 return new_var;
951 while (!is_taskreg_ctx (ctx))
953 ctx = ctx->outer;
954 if (ctx == NULL)
955 return var;
956 new_var = maybe_lookup_decl (var, ctx);
957 if (new_var)
958 return new_var;
961 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
962 return var;
964 return error_mark_node;
967 /* Create a new context, with OUTER_CTX being the surrounding context. */
969 static omp_context *
970 new_omp_context (gimple *stmt, omp_context *outer_ctx)
972 omp_context *ctx = XCNEW (omp_context);
974 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
975 (splay_tree_value) ctx);
976 ctx->stmt = stmt;
978 if (outer_ctx)
980 ctx->outer = outer_ctx;
981 ctx->cb = outer_ctx->cb;
982 ctx->cb.block = NULL;
983 ctx->depth = outer_ctx->depth + 1;
985 else
987 ctx->cb.src_fn = current_function_decl;
988 ctx->cb.dst_fn = current_function_decl;
989 ctx->cb.src_node = cgraph_node::get (current_function_decl);
990 gcc_checking_assert (ctx->cb.src_node);
991 ctx->cb.dst_node = ctx->cb.src_node;
992 ctx->cb.src_cfun = cfun;
993 ctx->cb.copy_decl = omp_copy_decl;
994 ctx->cb.eh_lp_nr = 0;
995 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
996 ctx->cb.adjust_array_error_bounds = true;
997 ctx->cb.dont_remap_vla_if_no_change = true;
998 ctx->depth = 1;
1001 ctx->cb.decl_map = new hash_map<tree, tree>;
1003 return ctx;
1006 static gimple_seq maybe_catch_exception (gimple_seq);
1008 /* Finalize task copyfn. */
1010 static void
1011 finalize_task_copyfn (gomp_task *task_stmt)
1013 struct function *child_cfun;
1014 tree child_fn;
1015 gimple_seq seq = NULL, new_seq;
1016 gbind *bind;
1018 child_fn = gimple_omp_task_copy_fn (task_stmt);
1019 if (child_fn == NULL_TREE)
1020 return;
1022 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
1023 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
1025 push_cfun (child_cfun);
1026 bind = gimplify_body (child_fn, false);
1027 gimple_seq_add_stmt (&seq, bind);
1028 new_seq = maybe_catch_exception (seq);
1029 if (new_seq != seq)
1031 bind = gimple_build_bind (NULL, new_seq, NULL);
1032 seq = NULL;
1033 gimple_seq_add_stmt (&seq, bind);
1035 gimple_set_body (child_fn, seq);
1036 pop_cfun ();
1038 /* Inform the callgraph about the new function. */
1039 cgraph_node *node = cgraph_node::get_create (child_fn);
1040 node->parallelized_function = 1;
1041 cgraph_node::add_new_function (child_fn, false);
1044 /* Destroy a omp_context data structures. Called through the splay tree
1045 value delete callback. */
1047 static void
1048 delete_omp_context (splay_tree_value value)
1050 omp_context *ctx = (omp_context *) value;
1052 delete ctx->cb.decl_map;
1054 if (ctx->field_map)
1055 splay_tree_delete (ctx->field_map);
1056 if (ctx->sfield_map)
1057 splay_tree_delete (ctx->sfield_map);
1059 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1060 it produces corrupt debug information. */
1061 if (ctx->record_type)
1063 tree t;
1064 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1065 DECL_ABSTRACT_ORIGIN (t) = NULL;
1067 if (ctx->srecord_type)
1069 tree t;
1070 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1071 DECL_ABSTRACT_ORIGIN (t) = NULL;
1074 if (ctx->task_reduction_map)
1076 ctx->task_reductions.release ();
1077 delete ctx->task_reduction_map;
1080 delete ctx->lastprivate_conditional_map;
1081 delete ctx->allocate_map;
1083 XDELETE (ctx);
1086 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1087 context. */
1089 static void
1090 fixup_child_record_type (omp_context *ctx)
1092 tree f, type = ctx->record_type;
1094 if (!ctx->receiver_decl)
1095 return;
1096 /* ??? It isn't sufficient to just call remap_type here, because
1097 variably_modified_type_p doesn't work the way we expect for
1098 record types. Testing each field for whether it needs remapping
1099 and creating a new record by hand works, however. */
1100 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1101 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1102 break;
1103 if (f)
1105 tree name, new_fields = NULL;
1107 type = lang_hooks.types.make_type (RECORD_TYPE);
1108 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1109 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1110 TYPE_DECL, name, type);
1111 TYPE_NAME (type) = name;
1113 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1115 tree new_f = copy_node (f);
1116 DECL_CONTEXT (new_f) = type;
1117 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1118 DECL_CHAIN (new_f) = new_fields;
1119 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1120 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1121 &ctx->cb, NULL);
1122 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1123 &ctx->cb, NULL);
1124 new_fields = new_f;
1126 /* Arrange to be able to look up the receiver field
1127 given the sender field. */
1128 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1129 (splay_tree_value) new_f);
1131 TYPE_FIELDS (type) = nreverse (new_fields);
1132 layout_type (type);
1135 /* In a target region we never modify any of the pointers in *.omp_data_i,
1136 so attempt to help the optimizers. */
1137 if (is_gimple_omp_offloaded (ctx->stmt))
1138 type = build_qualified_type (type, TYPE_QUAL_CONST);
1140 TREE_TYPE (ctx->receiver_decl)
1141 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1144 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1145 specified by CLAUSES. */
1147 static void
1148 scan_sharing_clauses (tree clauses, omp_context *ctx)
1150 tree c, decl;
1151 bool scan_array_reductions = false;
1153 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1154 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
1155 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1156 /* omp_default_mem_alloc is 1 */
1157 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1158 || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
1160 /* The allocate clauses that appear on a target construct or on
1161 constructs in a target region must specify an allocator expression
1162 unless a requires directive with the dynamic_allocators clause
1163 is present in the same compilation unit. */
1164 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1165 && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS) == 0)
1166 && omp_maybe_offloaded_ctx (ctx))
1167 error_at (OMP_CLAUSE_LOCATION (c), "%<allocate%> clause must"
1168 " specify an allocator here");
1169 if (ctx->allocate_map == NULL)
1170 ctx->allocate_map = new hash_map<tree, tree>;
1171 tree val = integer_zero_node;
1172 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1173 val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
1174 if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
1175 val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
1176 ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
1179 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1181 bool by_ref;
1183 switch (OMP_CLAUSE_CODE (c))
1185 case OMP_CLAUSE_PRIVATE:
1186 decl = OMP_CLAUSE_DECL (c);
1187 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1188 goto do_private;
1189 else if (!is_variable_sized (decl))
1190 install_var_local (decl, ctx);
1191 break;
1193 case OMP_CLAUSE_SHARED:
1194 decl = OMP_CLAUSE_DECL (c);
1195 if (ctx->allocate_map && ctx->allocate_map->get (decl))
1196 ctx->allocate_map->remove (decl);
1197 /* Ignore shared directives in teams construct inside of
1198 target construct. */
1199 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1200 && !is_host_teams_ctx (ctx))
1202 /* Global variables don't need to be copied,
1203 the receiver side will use them directly. */
1204 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1205 if (is_global_var (odecl))
1206 break;
1207 insert_decl_map (&ctx->cb, decl, odecl);
1208 break;
1210 gcc_assert (is_taskreg_ctx (ctx));
1211 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1212 || !is_variable_sized (decl));
1213 /* Global variables don't need to be copied,
1214 the receiver side will use them directly. */
1215 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1216 break;
1217 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1219 use_pointer_for_field (decl, ctx);
1220 break;
1222 by_ref = use_pointer_for_field (decl, NULL);
1223 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1224 || TREE_ADDRESSABLE (decl)
1225 || by_ref
1226 || omp_privatize_by_reference (decl))
1228 by_ref = use_pointer_for_field (decl, ctx);
1229 install_var_field (decl, by_ref, 3, ctx);
1230 install_var_local (decl, ctx);
1231 break;
1233 /* We don't need to copy const scalar vars back. */
1234 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1235 goto do_private;
1237 case OMP_CLAUSE_REDUCTION:
1238 /* Collect 'reduction' clauses on OpenACC compute construct. */
1239 if (is_gimple_omp_oacc (ctx->stmt)
1240 && is_gimple_omp_offloaded (ctx->stmt))
1242 /* No 'reduction' clauses on OpenACC 'kernels'. */
1243 gcc_checking_assert (!is_oacc_kernels (ctx));
1244 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1245 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
1247 ctx->local_reduction_clauses
1248 = tree_cons (NULL, c, ctx->local_reduction_clauses);
1250 /* FALLTHRU */
1252 case OMP_CLAUSE_IN_REDUCTION:
1253 decl = OMP_CLAUSE_DECL (c);
1254 if (ctx->allocate_map
1255 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1256 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
1257 || OMP_CLAUSE_REDUCTION_TASK (c)))
1258 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1259 || is_task_ctx (ctx)))
1261 /* For now. */
1262 if (ctx->allocate_map->get (decl))
1263 ctx->allocate_map->remove (decl);
1265 if (TREE_CODE (decl) == MEM_REF)
1267 tree t = TREE_OPERAND (decl, 0);
1268 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1269 t = TREE_OPERAND (t, 0);
1270 if (INDIRECT_REF_P (t)
1271 || TREE_CODE (t) == ADDR_EXPR)
1272 t = TREE_OPERAND (t, 0);
1273 if (is_omp_target (ctx->stmt))
1275 if (is_variable_sized (t))
1277 gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
1278 t = DECL_VALUE_EXPR (t);
1279 gcc_assert (INDIRECT_REF_P (t));
1280 t = TREE_OPERAND (t, 0);
1281 gcc_assert (DECL_P (t));
1283 tree at = t;
1284 if (ctx->outer)
1285 scan_omp_op (&at, ctx->outer);
1286 tree nt = omp_copy_decl_1 (at, ctx->outer);
1287 splay_tree_insert (ctx->field_map,
1288 (splay_tree_key) &DECL_CONTEXT (t),
1289 (splay_tree_value) nt);
1290 if (at != t)
1291 splay_tree_insert (ctx->field_map,
1292 (splay_tree_key) &DECL_CONTEXT (at),
1293 (splay_tree_value) nt);
1294 break;
1296 install_var_local (t, ctx);
1297 if (is_taskreg_ctx (ctx)
1298 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1299 || (is_task_ctx (ctx)
1300 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1301 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1302 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1303 == POINTER_TYPE)))))
1304 && !is_variable_sized (t)
1305 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1306 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1307 && !is_task_ctx (ctx))))
1309 by_ref = use_pointer_for_field (t, NULL);
1310 if (is_task_ctx (ctx)
1311 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1312 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1314 install_var_field (t, false, 1, ctx);
1315 install_var_field (t, by_ref, 2, ctx);
1317 else
1318 install_var_field (t, by_ref, 3, ctx);
1320 break;
1322 if (is_omp_target (ctx->stmt))
1324 tree at = decl;
1325 if (ctx->outer)
1326 scan_omp_op (&at, ctx->outer);
1327 tree nt = omp_copy_decl_1 (at, ctx->outer);
1328 splay_tree_insert (ctx->field_map,
1329 (splay_tree_key) &DECL_CONTEXT (decl),
1330 (splay_tree_value) nt);
1331 if (at != decl)
1332 splay_tree_insert (ctx->field_map,
1333 (splay_tree_key) &DECL_CONTEXT (at),
1334 (splay_tree_value) nt);
1335 break;
1337 if (is_task_ctx (ctx)
1338 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1339 && OMP_CLAUSE_REDUCTION_TASK (c)
1340 && is_parallel_ctx (ctx)))
1342 /* Global variables don't need to be copied,
1343 the receiver side will use them directly. */
1344 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1346 by_ref = use_pointer_for_field (decl, ctx);
1347 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1348 install_var_field (decl, by_ref, 3, ctx);
1350 install_var_local (decl, ctx);
1351 break;
1353 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1354 && OMP_CLAUSE_REDUCTION_TASK (c))
1356 install_var_local (decl, ctx);
1357 break;
1359 goto do_private;
1361 case OMP_CLAUSE_LASTPRIVATE:
1362 /* Let the corresponding firstprivate clause create
1363 the variable. */
1364 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1365 break;
1366 /* FALLTHRU */
1368 case OMP_CLAUSE_FIRSTPRIVATE:
1369 case OMP_CLAUSE_LINEAR:
1370 decl = OMP_CLAUSE_DECL (c);
1371 do_private:
1372 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1373 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
1374 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1375 && is_gimple_omp_offloaded (ctx->stmt))
1377 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1378 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR
1379 && lang_hooks.decls.omp_array_data (decl, true)))
1381 by_ref = !omp_privatize_by_reference (decl);
1382 install_var_field (decl, by_ref, 3, ctx);
1384 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1386 if (INDIRECT_REF_P (decl))
1387 decl = TREE_OPERAND (decl, 0);
1388 install_var_field (decl, true, 3, ctx);
1390 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1391 install_var_field (decl, true, 3, ctx);
1392 else
1393 install_var_field (decl, false, 3, ctx);
1395 if (is_variable_sized (decl))
1397 if (is_task_ctx (ctx))
1399 if (ctx->allocate_map
1400 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1402 /* For now. */
1403 if (ctx->allocate_map->get (decl))
1404 ctx->allocate_map->remove (decl);
1406 install_var_field (decl, false, 1, ctx);
1408 break;
1410 else if (is_taskreg_ctx (ctx))
1412 bool global
1413 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1414 by_ref = use_pointer_for_field (decl, NULL);
1416 if (is_task_ctx (ctx)
1417 && (global || by_ref || omp_privatize_by_reference (decl)))
1419 if (ctx->allocate_map
1420 && ctx->allocate_map->get (decl))
1421 install_var_field (decl, by_ref, 32 | 1, ctx);
1422 else
1423 install_var_field (decl, false, 1, ctx);
1424 if (!global)
1425 install_var_field (decl, by_ref, 2, ctx);
1427 else if (!global)
1428 install_var_field (decl, by_ref, 3, ctx);
1430 install_var_local (decl, ctx);
1431 /* For descr arrays on target: firstprivatize data + attach ptr. */
1432 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1433 && is_gimple_omp_offloaded (ctx->stmt)
1434 && !is_gimple_omp_oacc (ctx->stmt)
1435 && lang_hooks.decls.omp_array_data (decl, true))
1437 install_var_field (decl, false, 16 | 3, ctx);
1438 install_var_field (decl, true, 8 | 3, ctx);
1440 break;
1442 case OMP_CLAUSE_USE_DEVICE_PTR:
1443 case OMP_CLAUSE_USE_DEVICE_ADDR:
1444 decl = OMP_CLAUSE_DECL (c);
1446 /* Fortran array descriptors. */
1447 if (lang_hooks.decls.omp_array_data (decl, true))
1448 install_var_field (decl, false, 19, ctx);
1449 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1450 && !omp_privatize_by_reference (decl)
1451 && !omp_is_allocatable_or_ptr (decl))
1452 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1453 install_var_field (decl, true, 11, ctx);
1454 else
1455 install_var_field (decl, false, 11, ctx);
1456 if (DECL_SIZE (decl)
1457 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1459 tree decl2 = DECL_VALUE_EXPR (decl);
1460 gcc_assert (INDIRECT_REF_P (decl2));
1461 decl2 = TREE_OPERAND (decl2, 0);
1462 gcc_assert (DECL_P (decl2));
1463 install_var_local (decl2, ctx);
1465 install_var_local (decl, ctx);
1466 break;
1468 case OMP_CLAUSE_HAS_DEVICE_ADDR:
1469 decl = OMP_CLAUSE_DECL (c);
1470 while (INDIRECT_REF_P (decl)
1471 || TREE_CODE (decl) == ARRAY_REF)
1472 decl = TREE_OPERAND (decl, 0);
1473 goto do_private;
1475 case OMP_CLAUSE_IS_DEVICE_PTR:
1476 decl = OMP_CLAUSE_DECL (c);
1477 goto do_private;
1479 case OMP_CLAUSE__LOOPTEMP_:
1480 case OMP_CLAUSE__REDUCTEMP_:
1481 gcc_assert (is_taskreg_ctx (ctx));
1482 decl = OMP_CLAUSE_DECL (c);
1483 install_var_field (decl, false, 3, ctx);
1484 install_var_local (decl, ctx);
1485 break;
1487 case OMP_CLAUSE_COPYPRIVATE:
1488 case OMP_CLAUSE_COPYIN:
1489 decl = OMP_CLAUSE_DECL (c);
1490 by_ref = use_pointer_for_field (decl, NULL);
1491 install_var_field (decl, by_ref, 3, ctx);
1492 break;
1494 case OMP_CLAUSE_FINAL:
1495 case OMP_CLAUSE_IF:
1496 case OMP_CLAUSE_NUM_THREADS:
1497 case OMP_CLAUSE_NUM_TEAMS:
1498 case OMP_CLAUSE_THREAD_LIMIT:
1499 case OMP_CLAUSE_DEVICE:
1500 case OMP_CLAUSE_SCHEDULE:
1501 case OMP_CLAUSE_DIST_SCHEDULE:
1502 case OMP_CLAUSE_DEPEND:
1503 case OMP_CLAUSE_PRIORITY:
1504 case OMP_CLAUSE_GRAINSIZE:
1505 case OMP_CLAUSE_NUM_TASKS:
1506 case OMP_CLAUSE_NUM_GANGS:
1507 case OMP_CLAUSE_NUM_WORKERS:
1508 case OMP_CLAUSE_VECTOR_LENGTH:
1509 case OMP_CLAUSE_DETACH:
1510 case OMP_CLAUSE_FILTER:
1511 if (ctx->outer)
1512 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1513 break;
1515 case OMP_CLAUSE_TO:
1516 case OMP_CLAUSE_FROM:
1517 case OMP_CLAUSE_MAP:
1518 if (ctx->outer)
1519 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1520 decl = OMP_CLAUSE_DECL (c);
1521 /* If requested, make 'decl' addressable. */
1522 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1523 && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c))
1525 gcc_checking_assert (DECL_P (decl));
1527 bool decl_addressable = TREE_ADDRESSABLE (decl);
1528 if (!decl_addressable)
1530 if (!make_addressable_vars)
1531 make_addressable_vars = BITMAP_ALLOC (NULL);
1532 bitmap_set_bit (make_addressable_vars, DECL_UID (decl));
1533 TREE_ADDRESSABLE (decl) = 1;
1536 if (dump_enabled_p ())
1538 location_t loc = OMP_CLAUSE_LOCATION (c);
1539 const dump_user_location_t d_u_loc
1540 = dump_user_location_t::from_location_t (loc);
1541 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
1542 #if __GNUC__ >= 10
1543 # pragma GCC diagnostic push
1544 # pragma GCC diagnostic ignored "-Wformat"
1545 #endif
1546 if (!decl_addressable)
1547 dump_printf_loc (MSG_NOTE, d_u_loc,
1548 "variable %<%T%>"
1549 " made addressable\n",
1550 decl);
1551 else
1552 dump_printf_loc (MSG_NOTE, d_u_loc,
1553 "variable %<%T%>"
1554 " already made addressable\n",
1555 decl);
1556 #if __GNUC__ >= 10
1557 # pragma GCC diagnostic pop
1558 #endif
1561 /* Done. */
1562 OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c) = 0;
1564 /* Global variables with "omp declare target" attribute
1565 don't need to be copied, the receiver side will use them
1566 directly. However, global variables with "omp declare target link"
1567 attribute need to be copied. Or when ALWAYS modifier is used. */
1568 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1569 && DECL_P (decl)
1570 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1571 && (OMP_CLAUSE_MAP_KIND (c)
1572 != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
1573 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
1574 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
1575 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1576 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1577 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1578 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1579 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_TO
1580 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_FROM
1581 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_TOFROM
1582 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
1583 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1584 && varpool_node::get_create (decl)->offloadable
1585 && !lookup_attribute ("omp declare target link",
1586 DECL_ATTRIBUTES (decl)))
1587 break;
1588 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1589 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1591 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1592 not offloaded; there is nothing to map for those. */
1593 if (!is_gimple_omp_offloaded (ctx->stmt)
1594 && !POINTER_TYPE_P (TREE_TYPE (decl))
1595 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1596 break;
1598 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1599 && DECL_P (decl)
1600 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1601 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1602 && is_omp_target (ctx->stmt))
1604 /* If this is an offloaded region, an attach operation should
1605 only exist when the pointer variable is mapped in a prior
1606 clause.
1607 If we had an error, we may not have attempted to sort clauses
1608 properly, so avoid the test. */
1609 if (is_gimple_omp_offloaded (ctx->stmt)
1610 && !seen_error ())
1611 gcc_assert
1612 (maybe_lookup_decl (decl, ctx)
1613 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1614 && lookup_attribute ("omp declare target",
1615 DECL_ATTRIBUTES (decl))));
1617 /* By itself, attach/detach is generated as part of pointer
1618 variable mapping and should not create new variables in the
1619 offloaded region, however sender refs for it must be created
1620 for its address to be passed to the runtime. */
1621 tree field
1622 = build_decl (OMP_CLAUSE_LOCATION (c),
1623 FIELD_DECL, NULL_TREE, ptr_type_node);
1624 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1625 insert_field_into_struct (ctx->record_type, field);
1626 /* To not clash with a map of the pointer variable itself,
1627 attach/detach maps have their field looked up by the *clause*
1628 tree expression, not the decl. */
1629 gcc_assert (!splay_tree_lookup (ctx->field_map,
1630 (splay_tree_key) c));
1631 splay_tree_insert (ctx->field_map, (splay_tree_key) c,
1632 (splay_tree_value) field);
1633 break;
1635 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1636 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1637 || (OMP_CLAUSE_MAP_KIND (c)
1638 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1640 if (TREE_CODE (decl) == COMPONENT_REF
1641 || (INDIRECT_REF_P (decl)
1642 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1643 && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1644 == REFERENCE_TYPE)
1645 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1646 == POINTER_TYPE)))))
1647 break;
1648 if (DECL_SIZE (decl)
1649 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1651 tree decl2 = DECL_VALUE_EXPR (decl);
1652 gcc_assert (INDIRECT_REF_P (decl2));
1653 decl2 = TREE_OPERAND (decl2, 0);
1654 gcc_assert (DECL_P (decl2));
1655 install_var_local (decl2, ctx);
1657 install_var_local (decl, ctx);
1658 break;
1660 if (DECL_P (decl))
1662 if (DECL_SIZE (decl)
1663 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1665 tree decl2 = DECL_VALUE_EXPR (decl);
1666 gcc_assert (INDIRECT_REF_P (decl2));
1667 decl2 = TREE_OPERAND (decl2, 0);
1668 gcc_assert (DECL_P (decl2));
1669 install_var_field (decl2, true, 3, ctx);
1670 install_var_local (decl2, ctx);
1671 install_var_local (decl, ctx);
1673 else
1675 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1676 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1677 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1678 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1679 install_var_field (decl, true, 7, ctx);
1680 else
1681 install_var_field (decl, true, 3, ctx);
1682 if (is_gimple_omp_offloaded (ctx->stmt)
1683 && !(is_gimple_omp_oacc (ctx->stmt)
1684 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
1685 install_var_local (decl, ctx);
1688 else
1690 tree base = get_base_address (decl);
1691 tree nc = OMP_CLAUSE_CHAIN (c);
1692 if (DECL_P (base)
1693 && nc != NULL_TREE
1694 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1695 && OMP_CLAUSE_DECL (nc) == base
1696 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1697 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1699 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1700 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1702 else
1704 if (ctx->outer)
1706 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1707 decl = OMP_CLAUSE_DECL (c);
1709 gcc_assert (!splay_tree_lookup (ctx->field_map,
1710 (splay_tree_key) decl));
1711 tree field
1712 = build_decl (OMP_CLAUSE_LOCATION (c),
1713 FIELD_DECL, NULL_TREE, ptr_type_node);
1714 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1715 insert_field_into_struct (ctx->record_type, field);
1716 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1717 (splay_tree_value) field);
1720 break;
1722 case OMP_CLAUSE_ORDER:
1723 ctx->order_concurrent = true;
1724 break;
1726 case OMP_CLAUSE_BIND:
1727 ctx->loop_p = true;
1728 break;
1730 case OMP_CLAUSE_NOWAIT:
1731 case OMP_CLAUSE_ORDERED:
1732 case OMP_CLAUSE_COLLAPSE:
1733 case OMP_CLAUSE_UNTIED:
1734 case OMP_CLAUSE_MERGEABLE:
1735 case OMP_CLAUSE_PROC_BIND:
1736 case OMP_CLAUSE_SAFELEN:
1737 case OMP_CLAUSE_SIMDLEN:
1738 case OMP_CLAUSE_THREADS:
1739 case OMP_CLAUSE_SIMD:
1740 case OMP_CLAUSE_NOGROUP:
1741 case OMP_CLAUSE_DEFAULTMAP:
1742 case OMP_CLAUSE_ASYNC:
1743 case OMP_CLAUSE_WAIT:
1744 case OMP_CLAUSE_GANG:
1745 case OMP_CLAUSE_WORKER:
1746 case OMP_CLAUSE_VECTOR:
1747 case OMP_CLAUSE_INDEPENDENT:
1748 case OMP_CLAUSE_AUTO:
1749 case OMP_CLAUSE_SEQ:
1750 case OMP_CLAUSE_TILE:
1751 case OMP_CLAUSE__SIMT_:
1752 case OMP_CLAUSE_DEFAULT:
1753 case OMP_CLAUSE_NONTEMPORAL:
1754 case OMP_CLAUSE_IF_PRESENT:
1755 case OMP_CLAUSE_FINALIZE:
1756 case OMP_CLAUSE_TASK_REDUCTION:
1757 case OMP_CLAUSE_ALLOCATE:
1758 break;
1760 case OMP_CLAUSE_ALIGNED:
1761 decl = OMP_CLAUSE_DECL (c);
1762 if (is_global_var (decl)
1763 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1764 install_var_local (decl, ctx);
1765 break;
1767 case OMP_CLAUSE__CONDTEMP_:
1768 decl = OMP_CLAUSE_DECL (c);
1769 if (is_parallel_ctx (ctx))
1771 install_var_field (decl, false, 3, ctx);
1772 install_var_local (decl, ctx);
1774 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1775 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1776 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1777 install_var_local (decl, ctx);
1778 break;
1780 case OMP_CLAUSE__CACHE_:
1781 case OMP_CLAUSE_NOHOST:
1782 default:
1783 gcc_unreachable ();
1787 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1789 switch (OMP_CLAUSE_CODE (c))
1791 case OMP_CLAUSE_LASTPRIVATE:
1792 /* Let the corresponding firstprivate clause create
1793 the variable. */
1794 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1795 scan_array_reductions = true;
1796 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1797 break;
1798 /* FALLTHRU */
1800 case OMP_CLAUSE_FIRSTPRIVATE:
1801 case OMP_CLAUSE_PRIVATE:
1802 case OMP_CLAUSE_LINEAR:
1803 case OMP_CLAUSE_HAS_DEVICE_ADDR:
1804 case OMP_CLAUSE_IS_DEVICE_PTR:
1805 decl = OMP_CLAUSE_DECL (c);
1806 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1808 while (INDIRECT_REF_P (decl)
1809 || TREE_CODE (decl) == ARRAY_REF)
1810 decl = TREE_OPERAND (decl, 0);
1813 if (is_variable_sized (decl))
1815 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1816 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
1817 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1818 && is_gimple_omp_offloaded (ctx->stmt))
1820 tree decl2 = DECL_VALUE_EXPR (decl);
1821 gcc_assert (INDIRECT_REF_P (decl2));
1822 decl2 = TREE_OPERAND (decl2, 0);
1823 gcc_assert (DECL_P (decl2));
1824 install_var_local (decl2, ctx);
1825 fixup_remapped_decl (decl2, ctx, false);
1827 install_var_local (decl, ctx);
1829 fixup_remapped_decl (decl, ctx,
1830 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1831 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1832 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1833 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1834 scan_array_reductions = true;
1835 break;
1837 case OMP_CLAUSE_REDUCTION:
1838 case OMP_CLAUSE_IN_REDUCTION:
1839 decl = OMP_CLAUSE_DECL (c);
1840 if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
1842 if (is_variable_sized (decl))
1843 install_var_local (decl, ctx);
1844 fixup_remapped_decl (decl, ctx, false);
1846 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1847 scan_array_reductions = true;
1848 break;
1850 case OMP_CLAUSE_TASK_REDUCTION:
1851 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1852 scan_array_reductions = true;
1853 break;
1855 case OMP_CLAUSE_SHARED:
1856 /* Ignore shared directives in teams construct inside of
1857 target construct. */
1858 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1859 && !is_host_teams_ctx (ctx))
1860 break;
1861 decl = OMP_CLAUSE_DECL (c);
1862 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1863 break;
1864 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1866 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1867 ctx->outer)))
1868 break;
1869 bool by_ref = use_pointer_for_field (decl, ctx);
1870 install_var_field (decl, by_ref, 11, ctx);
1871 break;
1873 fixup_remapped_decl (decl, ctx, false);
1874 break;
1876 case OMP_CLAUSE_MAP:
1877 if (!is_gimple_omp_offloaded (ctx->stmt))
1878 break;
1879 decl = OMP_CLAUSE_DECL (c);
1880 if (DECL_P (decl)
1881 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1882 && (OMP_CLAUSE_MAP_KIND (c)
1883 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1884 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1885 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1886 && varpool_node::get_create (decl)->offloadable)
1887 break;
1888 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1889 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1890 && is_omp_target (ctx->stmt)
1891 && !is_gimple_omp_offloaded (ctx->stmt))
1892 break;
1893 if (DECL_P (decl))
1895 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1896 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1897 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1898 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1900 tree new_decl = lookup_decl (decl, ctx);
1901 TREE_TYPE (new_decl)
1902 = remap_type (TREE_TYPE (decl), &ctx->cb);
1904 else if (DECL_SIZE (decl)
1905 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1907 tree decl2 = DECL_VALUE_EXPR (decl);
1908 gcc_assert (INDIRECT_REF_P (decl2));
1909 decl2 = TREE_OPERAND (decl2, 0);
1910 gcc_assert (DECL_P (decl2));
1911 fixup_remapped_decl (decl2, ctx, false);
1912 fixup_remapped_decl (decl, ctx, true);
1914 else
1915 fixup_remapped_decl (decl, ctx, false);
1917 break;
1919 case OMP_CLAUSE_COPYPRIVATE:
1920 case OMP_CLAUSE_COPYIN:
1921 case OMP_CLAUSE_DEFAULT:
1922 case OMP_CLAUSE_IF:
1923 case OMP_CLAUSE_NUM_THREADS:
1924 case OMP_CLAUSE_NUM_TEAMS:
1925 case OMP_CLAUSE_THREAD_LIMIT:
1926 case OMP_CLAUSE_DEVICE:
1927 case OMP_CLAUSE_SCHEDULE:
1928 case OMP_CLAUSE_DIST_SCHEDULE:
1929 case OMP_CLAUSE_NOWAIT:
1930 case OMP_CLAUSE_ORDERED:
1931 case OMP_CLAUSE_COLLAPSE:
1932 case OMP_CLAUSE_UNTIED:
1933 case OMP_CLAUSE_FINAL:
1934 case OMP_CLAUSE_MERGEABLE:
1935 case OMP_CLAUSE_PROC_BIND:
1936 case OMP_CLAUSE_SAFELEN:
1937 case OMP_CLAUSE_SIMDLEN:
1938 case OMP_CLAUSE_ALIGNED:
1939 case OMP_CLAUSE_DEPEND:
1940 case OMP_CLAUSE_DETACH:
1941 case OMP_CLAUSE_ALLOCATE:
1942 case OMP_CLAUSE__LOOPTEMP_:
1943 case OMP_CLAUSE__REDUCTEMP_:
1944 case OMP_CLAUSE_TO:
1945 case OMP_CLAUSE_FROM:
1946 case OMP_CLAUSE_PRIORITY:
1947 case OMP_CLAUSE_GRAINSIZE:
1948 case OMP_CLAUSE_NUM_TASKS:
1949 case OMP_CLAUSE_THREADS:
1950 case OMP_CLAUSE_SIMD:
1951 case OMP_CLAUSE_NOGROUP:
1952 case OMP_CLAUSE_DEFAULTMAP:
1953 case OMP_CLAUSE_ORDER:
1954 case OMP_CLAUSE_BIND:
1955 case OMP_CLAUSE_USE_DEVICE_PTR:
1956 case OMP_CLAUSE_USE_DEVICE_ADDR:
1957 case OMP_CLAUSE_NONTEMPORAL:
1958 case OMP_CLAUSE_ASYNC:
1959 case OMP_CLAUSE_WAIT:
1960 case OMP_CLAUSE_NUM_GANGS:
1961 case OMP_CLAUSE_NUM_WORKERS:
1962 case OMP_CLAUSE_VECTOR_LENGTH:
1963 case OMP_CLAUSE_GANG:
1964 case OMP_CLAUSE_WORKER:
1965 case OMP_CLAUSE_VECTOR:
1966 case OMP_CLAUSE_INDEPENDENT:
1967 case OMP_CLAUSE_AUTO:
1968 case OMP_CLAUSE_SEQ:
1969 case OMP_CLAUSE_TILE:
1970 case OMP_CLAUSE__SIMT_:
1971 case OMP_CLAUSE_IF_PRESENT:
1972 case OMP_CLAUSE_FINALIZE:
1973 case OMP_CLAUSE_FILTER:
1974 case OMP_CLAUSE__CONDTEMP_:
1975 break;
1977 case OMP_CLAUSE__CACHE_:
1978 case OMP_CLAUSE_NOHOST:
1979 default:
1980 gcc_unreachable ();
1984 gcc_checking_assert (!scan_array_reductions
1985 || !is_gimple_omp_oacc (ctx->stmt));
1986 if (scan_array_reductions)
1988 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1989 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1990 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1991 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1992 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1994 omp_context *rctx = ctx;
1995 if (is_omp_target (ctx->stmt))
1996 rctx = ctx->outer;
1997 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
1998 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
2000 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
2001 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
2002 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
2003 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
2004 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
2005 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
2009 /* Create a new name for omp child function. Returns an identifier. */
2011 static tree
2012 create_omp_child_function_name (bool task_copy)
2014 return clone_function_name_numbered (current_function_decl,
2015 task_copy ? "_omp_cpyfn" : "_omp_fn");
2018 /* Return true if CTX may belong to offloaded code: either if current function
2019 is offloaded, or any enclosing context corresponds to a target region. */
2021 static bool
2022 omp_maybe_offloaded_ctx (omp_context *ctx)
2024 if (cgraph_node::get (current_function_decl)->offloadable)
2025 return true;
2026 for (; ctx; ctx = ctx->outer)
2027 if (is_gimple_omp_offloaded (ctx->stmt))
2028 return true;
2029 return false;
2032 /* Build a decl for the omp child function. It'll not contain a body
2033 yet, just the bare decl. */
2035 static void
2036 create_omp_child_function (omp_context *ctx, bool task_copy)
2038 tree decl, type, name, t;
2040 name = create_omp_child_function_name (task_copy);
2041 if (task_copy)
2042 type = build_function_type_list (void_type_node, ptr_type_node,
2043 ptr_type_node, NULL_TREE);
2044 else
2045 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2047 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
2049 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
2050 || !task_copy);
2051 if (!task_copy)
2052 ctx->cb.dst_fn = decl;
2053 else
2054 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
2056 TREE_STATIC (decl) = 1;
2057 TREE_USED (decl) = 1;
2058 DECL_ARTIFICIAL (decl) = 1;
2059 DECL_IGNORED_P (decl) = 0;
2060 TREE_PUBLIC (decl) = 0;
2061 DECL_UNINLINABLE (decl) = 1;
2062 DECL_EXTERNAL (decl) = 0;
2063 DECL_CONTEXT (decl) = NULL_TREE;
2064 DECL_INITIAL (decl) = make_node (BLOCK);
2065 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
2066 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
2067 /* Remove omp declare simd attribute from the new attributes. */
2068 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
2070 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
2071 a = a2;
2072 a = TREE_CHAIN (a);
2073 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
2074 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
2075 *p = TREE_CHAIN (*p);
2076 else
2078 tree chain = TREE_CHAIN (*p);
2079 *p = copy_node (*p);
2080 p = &TREE_CHAIN (*p);
2081 *p = chain;
2084 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
2085 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
2086 DECL_FUNCTION_SPECIFIC_TARGET (decl)
2087 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
2088 DECL_FUNCTION_VERSIONED (decl)
2089 = DECL_FUNCTION_VERSIONED (current_function_decl);
2091 if (omp_maybe_offloaded_ctx (ctx))
2093 cgraph_node::get_create (decl)->offloadable = 1;
2094 if (ENABLE_OFFLOADING)
2095 g->have_offload = true;
2098 if (cgraph_node::get_create (decl)->offloadable)
2100 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
2101 ? "omp target entrypoint"
2102 : "omp declare target");
2103 if (lookup_attribute ("omp declare target",
2104 DECL_ATTRIBUTES (current_function_decl)))
2106 if (is_gimple_omp_offloaded (ctx->stmt))
2107 DECL_ATTRIBUTES (decl)
2108 = remove_attribute ("omp declare target",
2109 copy_list (DECL_ATTRIBUTES (decl)));
2110 else
2111 target_attr = NULL;
2113 if (target_attr
2114 && is_gimple_omp_offloaded (ctx->stmt)
2115 && lookup_attribute ("noclone", DECL_ATTRIBUTES (decl)) == NULL_TREE)
2116 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("noclone"),
2117 NULL_TREE, DECL_ATTRIBUTES (decl));
2118 if (target_attr)
2119 DECL_ATTRIBUTES (decl)
2120 = tree_cons (get_identifier (target_attr),
2121 NULL_TREE, DECL_ATTRIBUTES (decl));
2124 t = build_decl (DECL_SOURCE_LOCATION (decl),
2125 RESULT_DECL, NULL_TREE, void_type_node);
2126 DECL_ARTIFICIAL (t) = 1;
2127 DECL_IGNORED_P (t) = 1;
2128 DECL_CONTEXT (t) = decl;
2129 DECL_RESULT (decl) = t;
2131 tree data_name = get_identifier (".omp_data_i");
2132 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
2133 ptr_type_node);
2134 DECL_ARTIFICIAL (t) = 1;
2135 DECL_NAMELESS (t) = 1;
2136 DECL_ARG_TYPE (t) = ptr_type_node;
2137 DECL_CONTEXT (t) = current_function_decl;
2138 TREE_USED (t) = 1;
2139 TREE_READONLY (t) = 1;
2140 DECL_ARGUMENTS (decl) = t;
2141 if (!task_copy)
2142 ctx->receiver_decl = t;
2143 else
2145 t = build_decl (DECL_SOURCE_LOCATION (decl),
2146 PARM_DECL, get_identifier (".omp_data_o"),
2147 ptr_type_node);
2148 DECL_ARTIFICIAL (t) = 1;
2149 DECL_NAMELESS (t) = 1;
2150 DECL_ARG_TYPE (t) = ptr_type_node;
2151 DECL_CONTEXT (t) = current_function_decl;
2152 TREE_USED (t) = 1;
2153 TREE_ADDRESSABLE (t) = 1;
2154 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2155 DECL_ARGUMENTS (decl) = t;
2158 /* Allocate memory for the function structure. The call to
2159 allocate_struct_function clobbers CFUN, so we need to restore
2160 it afterward. */
2161 push_struct_function (decl);
2162 cfun->function_end_locus = gimple_location (ctx->stmt);
2163 init_tree_ssa (cfun);
2164 pop_cfun ();
2167 /* Callback for walk_gimple_seq. Check if combined parallel
2168 contains gimple_omp_for_combined_into_p OMP_FOR. */
2170 tree
2171 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2172 bool *handled_ops_p,
2173 struct walk_stmt_info *wi)
2175 gimple *stmt = gsi_stmt (*gsi_p);
2177 *handled_ops_p = true;
2178 switch (gimple_code (stmt))
2180 WALK_SUBSTMTS;
2182 case GIMPLE_OMP_FOR:
2183 if (gimple_omp_for_combined_into_p (stmt)
2184 && gimple_omp_for_kind (stmt)
2185 == *(const enum gf_mask *) (wi->info))
2187 wi->info = stmt;
2188 return integer_zero_node;
2190 break;
2191 default:
2192 break;
2194 return NULL;
2197 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2199 static void
2200 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2201 omp_context *outer_ctx)
2203 struct walk_stmt_info wi;
2205 memset (&wi, 0, sizeof (wi));
2206 wi.val_only = true;
2207 wi.info = (void *) &msk;
2208 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2209 if (wi.info != (void *) &msk)
2211 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2212 struct omp_for_data fd;
2213 omp_extract_for_data (for_stmt, &fd, NULL);
2214 /* We need two temporaries with fd.loop.v type (istart/iend)
2215 and then (fd.collapse - 1) temporaries with the same
2216 type for count2 ... countN-1 vars if not constant. */
2217 size_t count = 2, i;
2218 tree type = fd.iter_type;
2219 if (fd.collapse > 1
2220 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2222 count += fd.collapse - 1;
2223 /* If there are lastprivate clauses on the inner
2224 GIMPLE_OMP_FOR, add one more temporaries for the total number
2225 of iterations (product of count1 ... countN-1). */
2226 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2227 OMP_CLAUSE_LASTPRIVATE)
2228 || (msk == GF_OMP_FOR_KIND_FOR
2229 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2230 OMP_CLAUSE_LASTPRIVATE)))
2232 tree temp = create_tmp_var (type);
2233 tree c = build_omp_clause (UNKNOWN_LOCATION,
2234 OMP_CLAUSE__LOOPTEMP_);
2235 insert_decl_map (&outer_ctx->cb, temp, temp);
2236 OMP_CLAUSE_DECL (c) = temp;
2237 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2238 gimple_omp_taskreg_set_clauses (stmt, c);
2240 if (fd.non_rect
2241 && fd.last_nonrect == fd.first_nonrect + 1)
2242 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2243 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2245 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2246 tree type2 = TREE_TYPE (v);
2247 count++;
2248 for (i = 0; i < 3; i++)
2250 tree temp = create_tmp_var (type2);
2251 tree c = build_omp_clause (UNKNOWN_LOCATION,
2252 OMP_CLAUSE__LOOPTEMP_);
2253 insert_decl_map (&outer_ctx->cb, temp, temp);
2254 OMP_CLAUSE_DECL (c) = temp;
2255 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2256 gimple_omp_taskreg_set_clauses (stmt, c);
2260 for (i = 0; i < count; i++)
2262 tree temp = create_tmp_var (type);
2263 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2264 insert_decl_map (&outer_ctx->cb, temp, temp);
2265 OMP_CLAUSE_DECL (c) = temp;
2266 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2267 gimple_omp_taskreg_set_clauses (stmt, c);
2270 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2271 && omp_find_clause (gimple_omp_task_clauses (stmt),
2272 OMP_CLAUSE_REDUCTION))
2274 tree type = build_pointer_type (pointer_sized_int_node);
2275 tree temp = create_tmp_var (type);
2276 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2277 insert_decl_map (&outer_ctx->cb, temp, temp);
2278 OMP_CLAUSE_DECL (c) = temp;
2279 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2280 gimple_omp_task_set_clauses (stmt, c);
2284 /* Scan an OpenMP parallel directive. */
2286 static void
2287 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2289 omp_context *ctx;
2290 tree name;
2291 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2293 /* Ignore parallel directives with empty bodies, unless there
2294 are copyin clauses. */
2295 if (optimize > 0
2296 && empty_body_p (gimple_omp_body (stmt))
2297 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2298 OMP_CLAUSE_COPYIN) == NULL)
2300 gsi_replace (gsi, gimple_build_nop (), false);
2301 return;
2304 if (gimple_omp_parallel_combined_p (stmt))
2305 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
2306 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2307 OMP_CLAUSE_REDUCTION);
2308 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2309 if (OMP_CLAUSE_REDUCTION_TASK (c))
2311 tree type = build_pointer_type (pointer_sized_int_node);
2312 tree temp = create_tmp_var (type);
2313 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2314 if (outer_ctx)
2315 insert_decl_map (&outer_ctx->cb, temp, temp);
2316 OMP_CLAUSE_DECL (c) = temp;
2317 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2318 gimple_omp_parallel_set_clauses (stmt, c);
2319 break;
2321 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2322 break;
2324 ctx = new_omp_context (stmt, outer_ctx);
2325 taskreg_contexts.safe_push (ctx);
2326 if (taskreg_nesting_level > 1)
2327 ctx->is_nested = true;
2328 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2329 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2330 name = create_tmp_var_name (".omp_data_s");
2331 name = build_decl (gimple_location (stmt),
2332 TYPE_DECL, name, ctx->record_type);
2333 DECL_ARTIFICIAL (name) = 1;
2334 DECL_NAMELESS (name) = 1;
2335 TYPE_NAME (ctx->record_type) = name;
2336 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2337 create_omp_child_function (ctx, false);
2338 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2340 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2341 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2343 if (TYPE_FIELDS (ctx->record_type) == NULL)
2344 ctx->record_type = ctx->receiver_decl = NULL;
2347 /* Scan an OpenMP task directive. */
2349 static void
2350 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2352 omp_context *ctx;
2353 tree name, t;
2354 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2356 /* Ignore task directives with empty bodies, unless they have depend
2357 clause. */
2358 if (optimize > 0
2359 && gimple_omp_body (stmt)
2360 && empty_body_p (gimple_omp_body (stmt))
2361 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2363 gsi_replace (gsi, gimple_build_nop (), false);
2364 return;
2367 if (gimple_omp_task_taskloop_p (stmt))
2368 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2370 ctx = new_omp_context (stmt, outer_ctx);
2372 if (gimple_omp_task_taskwait_p (stmt))
2374 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2375 return;
2378 taskreg_contexts.safe_push (ctx);
2379 if (taskreg_nesting_level > 1)
2380 ctx->is_nested = true;
2381 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2382 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2383 name = create_tmp_var_name (".omp_data_s");
2384 name = build_decl (gimple_location (stmt),
2385 TYPE_DECL, name, ctx->record_type);
2386 DECL_ARTIFICIAL (name) = 1;
2387 DECL_NAMELESS (name) = 1;
2388 TYPE_NAME (ctx->record_type) = name;
2389 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2390 create_omp_child_function (ctx, false);
2391 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2393 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2395 if (ctx->srecord_type)
2397 name = create_tmp_var_name (".omp_data_a");
2398 name = build_decl (gimple_location (stmt),
2399 TYPE_DECL, name, ctx->srecord_type);
2400 DECL_ARTIFICIAL (name) = 1;
2401 DECL_NAMELESS (name) = 1;
2402 TYPE_NAME (ctx->srecord_type) = name;
2403 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2404 create_omp_child_function (ctx, true);
2407 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2409 if (TYPE_FIELDS (ctx->record_type) == NULL)
2411 ctx->record_type = ctx->receiver_decl = NULL;
2412 t = build_int_cst (long_integer_type_node, 0);
2413 gimple_omp_task_set_arg_size (stmt, t);
2414 t = build_int_cst (long_integer_type_node, 1);
2415 gimple_omp_task_set_arg_align (stmt, t);
2419 /* Helper function for finish_taskreg_scan, called through walk_tree.
2420 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2421 tree, replace it in the expression. */
2423 static tree
2424 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2426 if (VAR_P (*tp))
2428 omp_context *ctx = (omp_context *) data;
2429 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2430 if (t != *tp)
2432 if (DECL_HAS_VALUE_EXPR_P (t))
2433 t = unshare_expr (DECL_VALUE_EXPR (t));
2434 *tp = t;
2436 *walk_subtrees = 0;
2438 else if (IS_TYPE_OR_DECL_P (*tp))
2439 *walk_subtrees = 0;
2440 return NULL_TREE;
2443 /* If any decls have been made addressable during scan_omp,
2444 adjust their fields if needed, and layout record types
2445 of parallel/task constructs. */
2447 static void
2448 finish_taskreg_scan (omp_context *ctx)
2450 if (ctx->record_type == NULL_TREE)
2451 return;
2453 /* If any make_addressable_vars were needed, verify all
2454 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2455 statements if use_pointer_for_field hasn't changed
2456 because of that. If it did, update field types now. */
2457 if (make_addressable_vars)
2459 tree c;
2461 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2462 c; c = OMP_CLAUSE_CHAIN (c))
2463 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2464 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2466 tree decl = OMP_CLAUSE_DECL (c);
2468 /* Global variables don't need to be copied,
2469 the receiver side will use them directly. */
2470 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2471 continue;
2472 if (!bitmap_bit_p (make_addressable_vars, DECL_UID (decl))
2473 || !use_pointer_for_field (decl, ctx))
2474 continue;
2475 tree field = lookup_field (decl, ctx);
2476 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2477 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2478 continue;
2479 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2480 TREE_THIS_VOLATILE (field) = 0;
2481 DECL_USER_ALIGN (field) = 0;
2482 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2483 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2484 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2485 if (ctx->srecord_type)
2487 tree sfield = lookup_sfield (decl, ctx);
2488 TREE_TYPE (sfield) = TREE_TYPE (field);
2489 TREE_THIS_VOLATILE (sfield) = 0;
2490 DECL_USER_ALIGN (sfield) = 0;
2491 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2492 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2493 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2498 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2500 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2501 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2502 if (c)
2504 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2505 expects to find it at the start of data. */
2506 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2507 tree *p = &TYPE_FIELDS (ctx->record_type);
2508 while (*p)
2509 if (*p == f)
2511 *p = DECL_CHAIN (*p);
2512 break;
2514 else
2515 p = &DECL_CHAIN (*p);
2516 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2517 TYPE_FIELDS (ctx->record_type) = f;
2519 layout_type (ctx->record_type);
2520 fixup_child_record_type (ctx);
2522 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2524 layout_type (ctx->record_type);
2525 fixup_child_record_type (ctx);
2527 else
2529 location_t loc = gimple_location (ctx->stmt);
2530 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2531 tree detach_clause
2532 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2533 OMP_CLAUSE_DETACH);
2534 /* Move VLA fields to the end. */
2535 p = &TYPE_FIELDS (ctx->record_type);
2536 while (*p)
2537 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2538 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2540 *q = *p;
2541 *p = TREE_CHAIN (*p);
2542 TREE_CHAIN (*q) = NULL_TREE;
2543 q = &TREE_CHAIN (*q);
2545 else
2546 p = &DECL_CHAIN (*p);
2547 *p = vla_fields;
2548 if (gimple_omp_task_taskloop_p (ctx->stmt))
2550 /* Move fields corresponding to first and second _looptemp_
2551 clause first. There are filled by GOMP_taskloop
2552 and thus need to be in specific positions. */
2553 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2554 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2555 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2556 OMP_CLAUSE__LOOPTEMP_);
2557 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2558 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2559 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2560 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2561 p = &TYPE_FIELDS (ctx->record_type);
2562 while (*p)
2563 if (*p == f1 || *p == f2 || *p == f3)
2564 *p = DECL_CHAIN (*p);
2565 else
2566 p = &DECL_CHAIN (*p);
2567 DECL_CHAIN (f1) = f2;
2568 if (c3)
2570 DECL_CHAIN (f2) = f3;
2571 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2573 else
2574 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2575 TYPE_FIELDS (ctx->record_type) = f1;
2576 if (ctx->srecord_type)
2578 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2579 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2580 if (c3)
2581 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2582 p = &TYPE_FIELDS (ctx->srecord_type);
2583 while (*p)
2584 if (*p == f1 || *p == f2 || *p == f3)
2585 *p = DECL_CHAIN (*p);
2586 else
2587 p = &DECL_CHAIN (*p);
2588 DECL_CHAIN (f1) = f2;
2589 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2590 if (c3)
2592 DECL_CHAIN (f2) = f3;
2593 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2595 else
2596 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2597 TYPE_FIELDS (ctx->srecord_type) = f1;
2600 if (detach_clause)
2602 tree c, field;
2604 /* Look for a firstprivate clause with the detach event handle. */
2605 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2606 c; c = OMP_CLAUSE_CHAIN (c))
2608 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2609 continue;
2610 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2611 == OMP_CLAUSE_DECL (detach_clause))
2612 break;
2615 gcc_assert (c);
2616 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2618 /* Move field corresponding to the detach clause first.
2619 This is filled by GOMP_task and needs to be in a
2620 specific position. */
2621 p = &TYPE_FIELDS (ctx->record_type);
2622 while (*p)
2623 if (*p == field)
2624 *p = DECL_CHAIN (*p);
2625 else
2626 p = &DECL_CHAIN (*p);
2627 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2628 TYPE_FIELDS (ctx->record_type) = field;
2629 if (ctx->srecord_type)
2631 field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2632 p = &TYPE_FIELDS (ctx->srecord_type);
2633 while (*p)
2634 if (*p == field)
2635 *p = DECL_CHAIN (*p);
2636 else
2637 p = &DECL_CHAIN (*p);
2638 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2639 TYPE_FIELDS (ctx->srecord_type) = field;
2642 layout_type (ctx->record_type);
2643 fixup_child_record_type (ctx);
2644 if (ctx->srecord_type)
2645 layout_type (ctx->srecord_type);
2646 tree t = fold_convert_loc (loc, long_integer_type_node,
2647 TYPE_SIZE_UNIT (ctx->record_type));
2648 if (TREE_CODE (t) != INTEGER_CST)
2650 t = unshare_expr (t);
2651 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2653 gimple_omp_task_set_arg_size (ctx->stmt, t);
2654 t = build_int_cst (long_integer_type_node,
2655 TYPE_ALIGN_UNIT (ctx->record_type));
2656 gimple_omp_task_set_arg_align (ctx->stmt, t);
2660 /* Find the enclosing offload context. */
2662 static omp_context *
2663 enclosing_target_ctx (omp_context *ctx)
2665 for (; ctx; ctx = ctx->outer)
2666 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2667 break;
2669 return ctx;
2672 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2673 construct.
2674 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2676 static bool
2677 ctx_in_oacc_kernels_region (omp_context *ctx)
2679 for (;ctx != NULL; ctx = ctx->outer)
2681 gimple *stmt = ctx->stmt;
2682 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2683 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2684 return true;
2687 return false;
2690 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2691 (This doesn't include OpenACC 'kernels' decomposed parts.)
2692 Until kernels handling moves to use the same loop indirection
2693 scheme as parallel, we need to do this checking early. */
2695 static unsigned
2696 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2698 bool checking = true;
2699 unsigned outer_mask = 0;
2700 unsigned this_mask = 0;
2701 bool has_seq = false, has_auto = false;
2703 if (ctx->outer)
2704 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2705 if (!stmt)
2707 checking = false;
2708 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2709 return outer_mask;
2710 stmt = as_a <gomp_for *> (ctx->stmt);
2713 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2715 switch (OMP_CLAUSE_CODE (c))
2717 case OMP_CLAUSE_GANG:
2718 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2719 break;
2720 case OMP_CLAUSE_WORKER:
2721 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2722 break;
2723 case OMP_CLAUSE_VECTOR:
2724 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2725 break;
2726 case OMP_CLAUSE_SEQ:
2727 has_seq = true;
2728 break;
2729 case OMP_CLAUSE_AUTO:
2730 has_auto = true;
2731 break;
2732 default:
2733 break;
2737 if (checking)
2739 if (has_seq && (this_mask || has_auto))
2740 error_at (gimple_location (stmt), "%<seq%> overrides other"
2741 " OpenACC loop specifiers");
2742 else if (has_auto && this_mask)
2743 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2744 " OpenACC loop specifiers");
2746 if (this_mask & outer_mask)
2747 error_at (gimple_location (stmt), "inner loop uses same"
2748 " OpenACC parallelism as containing loop");
2751 return outer_mask | this_mask;
2754 /* Scan a GIMPLE_OMP_FOR. */
2756 static omp_context *
2757 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2759 omp_context *ctx;
2760 size_t i;
2761 tree clauses = gimple_omp_for_clauses (stmt);
2763 ctx = new_omp_context (stmt, outer_ctx);
2765 if (is_gimple_omp_oacc (stmt))
2767 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2769 if (!(tgt && is_oacc_kernels (tgt)))
2770 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2772 tree c_op0;
2773 switch (OMP_CLAUSE_CODE (c))
2775 case OMP_CLAUSE_GANG:
2776 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2777 break;
2779 case OMP_CLAUSE_WORKER:
2780 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2781 break;
2783 case OMP_CLAUSE_VECTOR:
2784 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2785 break;
2787 default:
2788 continue;
2791 if (c_op0)
2793 /* By construction, this is impossible for OpenACC 'kernels'
2794 decomposed parts. */
2795 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2797 error_at (OMP_CLAUSE_LOCATION (c),
2798 "argument not permitted on %qs clause",
2799 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2800 if (tgt)
2801 inform (gimple_location (tgt->stmt),
2802 "enclosing parent compute construct");
2803 else if (oacc_get_fn_attrib (current_function_decl))
2804 inform (DECL_SOURCE_LOCATION (current_function_decl),
2805 "enclosing routine");
2806 else
2807 gcc_unreachable ();
2811 if (tgt && is_oacc_kernels (tgt))
2812 check_oacc_kernel_gwv (stmt, ctx);
2814 /* Collect all variables named in reductions on this loop. Ensure
2815 that, if this loop has a reduction on some variable v, and there is
2816 a reduction on v somewhere in an outer context, then there is a
2817 reduction on v on all intervening loops as well. */
2818 tree local_reduction_clauses = NULL;
2819 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2821 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2822 local_reduction_clauses
2823 = tree_cons (NULL, c, local_reduction_clauses);
2825 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2826 ctx->outer_reduction_clauses
2827 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2828 ctx->outer->outer_reduction_clauses);
2829 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2830 tree local_iter = local_reduction_clauses;
2831 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2833 tree local_clause = TREE_VALUE (local_iter);
2834 tree local_var = OMP_CLAUSE_DECL (local_clause);
2835 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2836 bool have_outer_reduction = false;
2837 tree ctx_iter = outer_reduction_clauses;
2838 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2840 tree outer_clause = TREE_VALUE (ctx_iter);
2841 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2842 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2843 if (outer_var == local_var && outer_op != local_op)
2845 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2846 "conflicting reduction operations for %qE",
2847 local_var);
2848 inform (OMP_CLAUSE_LOCATION (outer_clause),
2849 "location of the previous reduction for %qE",
2850 outer_var);
2852 if (outer_var == local_var)
2854 have_outer_reduction = true;
2855 break;
2858 if (have_outer_reduction)
2860 /* There is a reduction on outer_var both on this loop and on
2861 some enclosing loop. Walk up the context tree until such a
2862 loop with a reduction on outer_var is found, and complain
2863 about all intervening loops that do not have such a
2864 reduction. */
2865 struct omp_context *curr_loop = ctx->outer;
2866 bool found = false;
2867 while (curr_loop != NULL)
2869 tree curr_iter = curr_loop->local_reduction_clauses;
2870 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2872 tree curr_clause = TREE_VALUE (curr_iter);
2873 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2874 if (curr_var == local_var)
2876 found = true;
2877 break;
2880 if (!found)
2881 warning_at (gimple_location (curr_loop->stmt), 0,
2882 "nested loop in reduction needs "
2883 "reduction clause for %qE",
2884 local_var);
2885 else
2886 break;
2887 curr_loop = curr_loop->outer;
2891 ctx->local_reduction_clauses = local_reduction_clauses;
2892 ctx->outer_reduction_clauses
2893 = chainon (unshare_expr (ctx->local_reduction_clauses),
2894 ctx->outer_reduction_clauses);
2896 if (tgt && is_oacc_kernels (tgt))
2898 /* Strip out reductions, as they are not handled yet. */
2899 tree *prev_ptr = &clauses;
2901 while (tree probe = *prev_ptr)
2903 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2905 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2906 *prev_ptr = *next_ptr;
2907 else
2908 prev_ptr = next_ptr;
2911 gimple_omp_for_set_clauses (stmt, clauses);
2915 scan_sharing_clauses (clauses, ctx);
2917 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2918 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2920 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2921 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2922 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2923 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2925 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2926 return ctx;
2929 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2931 static void
2932 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2933 omp_context *outer_ctx)
2935 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2936 gsi_replace (gsi, bind, false);
2937 gimple_seq seq = NULL;
2938 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2939 tree cond = create_tmp_var_raw (integer_type_node);
2940 DECL_CONTEXT (cond) = current_function_decl;
2941 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2942 gimple_bind_set_vars (bind, cond);
2943 gimple_call_set_lhs (g, cond);
2944 gimple_seq_add_stmt (&seq, g);
2945 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2946 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2947 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2948 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2949 gimple_seq_add_stmt (&seq, g);
2950 g = gimple_build_label (lab1);
2951 gimple_seq_add_stmt (&seq, g);
2952 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2953 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2954 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2955 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2956 gimple_omp_for_set_clauses (new_stmt, clause);
2957 gimple_seq_add_stmt (&seq, new_stmt);
2958 g = gimple_build_goto (lab3);
2959 gimple_seq_add_stmt (&seq, g);
2960 g = gimple_build_label (lab2);
2961 gimple_seq_add_stmt (&seq, g);
2962 gimple_seq_add_stmt (&seq, stmt);
2963 g = gimple_build_label (lab3);
2964 gimple_seq_add_stmt (&seq, g);
2965 gimple_bind_set_body (bind, seq);
2966 update_stmt (bind);
2967 scan_omp_for (new_stmt, outer_ctx);
2968 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2971 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2972 struct walk_stmt_info *);
2973 static omp_context *maybe_lookup_ctx (gimple *);
2975 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2976 for scan phase loop. */
2978 static void
2979 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2980 omp_context *outer_ctx)
2982 /* The only change between inclusive and exclusive scan will be
2983 within the first simd loop, so just use inclusive in the
2984 worksharing loop. */
2985 outer_ctx->scan_inclusive = true;
2986 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2987 OMP_CLAUSE_DECL (c) = integer_zero_node;
2989 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2990 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2991 gsi_replace (gsi, input_stmt, false);
2992 gimple_seq input_body = NULL;
2993 gimple_seq_add_stmt (&input_body, stmt);
2994 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2996 gimple_stmt_iterator input1_gsi = gsi_none ();
2997 struct walk_stmt_info wi;
2998 memset (&wi, 0, sizeof (wi));
2999 wi.val_only = true;
3000 wi.info = (void *) &input1_gsi;
3001 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
3002 gcc_assert (!gsi_end_p (input1_gsi));
3004 gimple *input_stmt1 = gsi_stmt (input1_gsi);
3005 gsi_next (&input1_gsi);
3006 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
3007 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
3008 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
3009 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
3010 std::swap (input_stmt1, scan_stmt1);
3012 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
3013 gimple_omp_set_body (input_stmt1, NULL);
3015 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
3016 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
3018 gimple_omp_set_body (input_stmt1, input_body1);
3019 gimple_omp_set_body (scan_stmt1, NULL);
3021 gimple_stmt_iterator input2_gsi = gsi_none ();
3022 memset (&wi, 0, sizeof (wi));
3023 wi.val_only = true;
3024 wi.info = (void *) &input2_gsi;
3025 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
3026 NULL, &wi);
3027 gcc_assert (!gsi_end_p (input2_gsi));
3029 gimple *input_stmt2 = gsi_stmt (input2_gsi);
3030 gsi_next (&input2_gsi);
3031 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
3032 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
3033 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
3034 std::swap (input_stmt2, scan_stmt2);
3036 gimple_omp_set_body (input_stmt2, NULL);
3038 gimple_omp_set_body (input_stmt, input_body);
3039 gimple_omp_set_body (scan_stmt, scan_body);
3041 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
3042 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
3044 ctx = new_omp_context (scan_stmt, outer_ctx);
3045 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
3047 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
3050 /* Scan an OpenMP sections directive. */
3052 static void
3053 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
3055 omp_context *ctx;
3057 ctx = new_omp_context (stmt, outer_ctx);
3058 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
3059 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3062 /* Scan an OpenMP single directive. */
3064 static void
3065 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
3067 omp_context *ctx;
3068 tree name;
3070 ctx = new_omp_context (stmt, outer_ctx);
3071 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3072 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3073 name = create_tmp_var_name (".omp_copy_s");
3074 name = build_decl (gimple_location (stmt),
3075 TYPE_DECL, name, ctx->record_type);
3076 TYPE_NAME (ctx->record_type) = name;
3078 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
3079 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3081 if (TYPE_FIELDS (ctx->record_type) == NULL)
3082 ctx->record_type = NULL;
3083 else
3084 layout_type (ctx->record_type);
3087 /* Scan a GIMPLE_OMP_TARGET. */
3089 static void
3090 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
3092 omp_context *ctx;
3093 tree name;
3094 bool offloaded = is_gimple_omp_offloaded (stmt);
3095 tree clauses = gimple_omp_target_clauses (stmt);
3097 ctx = new_omp_context (stmt, outer_ctx);
3098 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3099 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3100 name = create_tmp_var_name (".omp_data_t");
3101 name = build_decl (gimple_location (stmt),
3102 TYPE_DECL, name, ctx->record_type);
3103 DECL_ARTIFICIAL (name) = 1;
3104 DECL_NAMELESS (name) = 1;
3105 TYPE_NAME (ctx->record_type) = name;
3106 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3108 if (offloaded)
3110 create_omp_child_function (ctx, false);
3111 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
3114 scan_sharing_clauses (clauses, ctx);
3115 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3117 if (TYPE_FIELDS (ctx->record_type) == NULL)
3118 ctx->record_type = ctx->receiver_decl = NULL;
3119 else
3121 TYPE_FIELDS (ctx->record_type)
3122 = nreverse (TYPE_FIELDS (ctx->record_type));
3123 if (flag_checking)
3125 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
3126 for (tree field = TYPE_FIELDS (ctx->record_type);
3127 field;
3128 field = DECL_CHAIN (field))
3129 gcc_assert (DECL_ALIGN (field) == align);
3131 layout_type (ctx->record_type);
3132 if (offloaded)
3133 fixup_child_record_type (ctx);
3136 if (ctx->teams_nested_p && ctx->nonteams_nested_p)
3138 error_at (gimple_location (stmt),
3139 "%<target%> construct with nested %<teams%> construct "
3140 "contains directives outside of the %<teams%> construct");
3141 gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
3145 /* Scan an OpenMP teams directive. */
3147 static void
3148 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
3150 omp_context *ctx = new_omp_context (stmt, outer_ctx);
3152 if (!gimple_omp_teams_host (stmt))
3154 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3155 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3156 return;
3158 taskreg_contexts.safe_push (ctx);
3159 gcc_assert (taskreg_nesting_level == 1);
3160 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3161 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3162 tree name = create_tmp_var_name (".omp_data_s");
3163 name = build_decl (gimple_location (stmt),
3164 TYPE_DECL, name, ctx->record_type);
3165 DECL_ARTIFICIAL (name) = 1;
3166 DECL_NAMELESS (name) = 1;
3167 TYPE_NAME (ctx->record_type) = name;
3168 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3169 create_omp_child_function (ctx, false);
3170 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
3172 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3173 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3175 if (TYPE_FIELDS (ctx->record_type) == NULL)
3176 ctx->record_type = ctx->receiver_decl = NULL;
3179 /* Check nesting restrictions. */
3180 static bool
3181 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
3183 tree c;
3185 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3186 inside an OpenACC CTX. */
3187 if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3188 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
3189 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3191 else if (!(is_gimple_omp (stmt)
3192 && is_gimple_omp_oacc (stmt)))
3194 if (oacc_get_fn_attrib (cfun->decl) != NULL)
3196 error_at (gimple_location (stmt),
3197 "non-OpenACC construct inside of OpenACC routine");
3198 return false;
3200 else
3201 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
3202 if (is_gimple_omp (octx->stmt)
3203 && is_gimple_omp_oacc (octx->stmt))
3205 error_at (gimple_location (stmt),
3206 "non-OpenACC construct inside of OpenACC region");
3207 return false;
3211 if (ctx != NULL)
3213 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
3214 && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
3216 c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
3217 OMP_CLAUSE_DEVICE);
3218 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3220 error_at (gimple_location (stmt),
3221 "OpenMP constructs are not allowed in target region "
3222 "with %<ancestor%>");
3223 return false;
3226 if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
3227 ctx->teams_nested_p = true;
3228 else
3229 ctx->nonteams_nested_p = true;
3231 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
3232 && ctx->outer
3233 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
3234 ctx = ctx->outer;
3235 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3236 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3237 && !ctx->loop_p)
3239 c = NULL_TREE;
3240 if (ctx->order_concurrent
3241 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
3242 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3243 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3245 error_at (gimple_location (stmt),
3246 "OpenMP constructs other than %<parallel%>, %<loop%>"
3247 " or %<simd%> may not be nested inside a region with"
3248 " the %<order(concurrent)%> clause");
3249 return false;
3251 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
3253 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3254 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3256 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
3257 && (ctx->outer == NULL
3258 || !gimple_omp_for_combined_into_p (ctx->stmt)
3259 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
3260 || (gimple_omp_for_kind (ctx->outer->stmt)
3261 != GF_OMP_FOR_KIND_FOR)
3262 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
3264 error_at (gimple_location (stmt),
3265 "%<ordered simd threads%> must be closely "
3266 "nested inside of %<%s simd%> region",
3267 lang_GNU_Fortran () ? "do" : "for");
3268 return false;
3270 return true;
3273 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3274 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
3275 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
3276 return true;
3277 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
3278 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3279 return true;
3280 error_at (gimple_location (stmt),
3281 "OpenMP constructs other than "
3282 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3283 "not be nested inside %<simd%> region");
3284 return false;
3286 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3288 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
3289 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
3290 && omp_find_clause (gimple_omp_for_clauses (stmt),
3291 OMP_CLAUSE_BIND) == NULL_TREE))
3292 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
3294 error_at (gimple_location (stmt),
3295 "only %<distribute%>, %<parallel%> or %<loop%> "
3296 "regions are allowed to be strictly nested inside "
3297 "%<teams%> region");
3298 return false;
3301 else if (ctx->order_concurrent
3302 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3303 && (gimple_code (stmt) != GIMPLE_OMP_FOR
3304 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3305 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3307 if (ctx->loop_p)
3308 error_at (gimple_location (stmt),
3309 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3310 "%<simd%> may not be nested inside a %<loop%> region");
3311 else
3312 error_at (gimple_location (stmt),
3313 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3314 "%<simd%> may not be nested inside a region with "
3315 "the %<order(concurrent)%> clause");
3316 return false;
3319 switch (gimple_code (stmt))
3321 case GIMPLE_OMP_FOR:
3322 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3323 return true;
3324 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3326 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3328 error_at (gimple_location (stmt),
3329 "%<distribute%> region must be strictly nested "
3330 "inside %<teams%> construct");
3331 return false;
3333 return true;
3335 /* We split taskloop into task and nested taskloop in it. */
3336 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3337 return true;
3338 /* For now, hope this will change and loop bind(parallel) will not
3339 be allowed in lots of contexts. */
3340 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3341 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3342 return true;
3343 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3345 bool ok = false;
3347 if (ctx)
3348 switch (gimple_code (ctx->stmt))
3350 case GIMPLE_OMP_FOR:
3351 ok = (gimple_omp_for_kind (ctx->stmt)
3352 == GF_OMP_FOR_KIND_OACC_LOOP);
3353 break;
3355 case GIMPLE_OMP_TARGET:
3356 switch (gimple_omp_target_kind (ctx->stmt))
3358 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3359 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3360 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3361 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3362 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3363 ok = true;
3364 break;
3366 default:
3367 break;
3370 default:
3371 break;
3373 else if (oacc_get_fn_attrib (current_function_decl))
3374 ok = true;
3375 if (!ok)
3377 error_at (gimple_location (stmt),
3378 "OpenACC loop directive must be associated with"
3379 " an OpenACC compute region");
3380 return false;
3383 /* FALLTHRU */
3384 case GIMPLE_CALL:
3385 if (is_gimple_call (stmt)
3386 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3387 == BUILT_IN_GOMP_CANCEL
3388 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3389 == BUILT_IN_GOMP_CANCELLATION_POINT))
3391 const char *bad = NULL;
3392 const char *kind = NULL;
3393 const char *construct
3394 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3395 == BUILT_IN_GOMP_CANCEL)
3396 ? "cancel"
3397 : "cancellation point";
3398 if (ctx == NULL)
3400 error_at (gimple_location (stmt), "orphaned %qs construct",
3401 construct);
3402 return false;
3404 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3405 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3406 : 0)
3408 case 1:
3409 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3410 bad = "parallel";
3411 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3412 == BUILT_IN_GOMP_CANCEL
3413 && !integer_zerop (gimple_call_arg (stmt, 1)))
3414 ctx->cancellable = true;
3415 kind = "parallel";
3416 break;
3417 case 2:
3418 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3419 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3420 bad = "for";
3421 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3422 == BUILT_IN_GOMP_CANCEL
3423 && !integer_zerop (gimple_call_arg (stmt, 1)))
3425 ctx->cancellable = true;
3426 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3427 OMP_CLAUSE_NOWAIT))
3428 warning_at (gimple_location (stmt), 0,
3429 "%<cancel for%> inside "
3430 "%<nowait%> for construct");
3431 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3432 OMP_CLAUSE_ORDERED))
3433 warning_at (gimple_location (stmt), 0,
3434 "%<cancel for%> inside "
3435 "%<ordered%> for construct");
3437 kind = "for";
3438 break;
3439 case 4:
3440 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3441 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3442 bad = "sections";
3443 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3444 == BUILT_IN_GOMP_CANCEL
3445 && !integer_zerop (gimple_call_arg (stmt, 1)))
3447 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3449 ctx->cancellable = true;
3450 if (omp_find_clause (gimple_omp_sections_clauses
3451 (ctx->stmt),
3452 OMP_CLAUSE_NOWAIT))
3453 warning_at (gimple_location (stmt), 0,
3454 "%<cancel sections%> inside "
3455 "%<nowait%> sections construct");
3457 else
3459 gcc_assert (ctx->outer
3460 && gimple_code (ctx->outer->stmt)
3461 == GIMPLE_OMP_SECTIONS);
3462 ctx->outer->cancellable = true;
3463 if (omp_find_clause (gimple_omp_sections_clauses
3464 (ctx->outer->stmt),
3465 OMP_CLAUSE_NOWAIT))
3466 warning_at (gimple_location (stmt), 0,
3467 "%<cancel sections%> inside "
3468 "%<nowait%> sections construct");
3471 kind = "sections";
3472 break;
3473 case 8:
3474 if (!is_task_ctx (ctx)
3475 && (!is_taskloop_ctx (ctx)
3476 || ctx->outer == NULL
3477 || !is_task_ctx (ctx->outer)))
3478 bad = "task";
3479 else
3481 for (omp_context *octx = ctx->outer;
3482 octx; octx = octx->outer)
3484 switch (gimple_code (octx->stmt))
3486 case GIMPLE_OMP_TASKGROUP:
3487 break;
3488 case GIMPLE_OMP_TARGET:
3489 if (gimple_omp_target_kind (octx->stmt)
3490 != GF_OMP_TARGET_KIND_REGION)
3491 continue;
3492 /* FALLTHRU */
3493 case GIMPLE_OMP_PARALLEL:
3494 case GIMPLE_OMP_TEAMS:
3495 error_at (gimple_location (stmt),
3496 "%<%s taskgroup%> construct not closely "
3497 "nested inside of %<taskgroup%> region",
3498 construct);
3499 return false;
3500 case GIMPLE_OMP_TASK:
3501 if (gimple_omp_task_taskloop_p (octx->stmt)
3502 && octx->outer
3503 && is_taskloop_ctx (octx->outer))
3505 tree clauses
3506 = gimple_omp_for_clauses (octx->outer->stmt);
3507 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3508 break;
3510 continue;
3511 default:
3512 continue;
3514 break;
3516 ctx->cancellable = true;
3518 kind = "taskgroup";
3519 break;
3520 default:
3521 error_at (gimple_location (stmt), "invalid arguments");
3522 return false;
3524 if (bad)
3526 error_at (gimple_location (stmt),
3527 "%<%s %s%> construct not closely nested inside of %qs",
3528 construct, kind, bad);
3529 return false;
3532 /* FALLTHRU */
3533 case GIMPLE_OMP_SECTIONS:
3534 case GIMPLE_OMP_SINGLE:
3535 for (; ctx != NULL; ctx = ctx->outer)
3536 switch (gimple_code (ctx->stmt))
3538 case GIMPLE_OMP_FOR:
3539 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3540 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3541 break;
3542 /* FALLTHRU */
3543 case GIMPLE_OMP_SECTIONS:
3544 case GIMPLE_OMP_SINGLE:
3545 case GIMPLE_OMP_ORDERED:
3546 case GIMPLE_OMP_MASTER:
3547 case GIMPLE_OMP_MASKED:
3548 case GIMPLE_OMP_TASK:
3549 case GIMPLE_OMP_CRITICAL:
3550 if (is_gimple_call (stmt))
3552 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3553 != BUILT_IN_GOMP_BARRIER)
3554 return true;
3555 error_at (gimple_location (stmt),
3556 "barrier region may not be closely nested inside "
3557 "of work-sharing, %<loop%>, %<critical%>, "
3558 "%<ordered%>, %<master%>, %<masked%>, explicit "
3559 "%<task%> or %<taskloop%> region");
3560 return false;
3562 error_at (gimple_location (stmt),
3563 "work-sharing region may not be closely nested inside "
3564 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3565 "%<master%>, %<masked%>, explicit %<task%> or "
3566 "%<taskloop%> region");
3567 return false;
3568 case GIMPLE_OMP_PARALLEL:
3569 case GIMPLE_OMP_TEAMS:
3570 return true;
3571 case GIMPLE_OMP_TARGET:
3572 if (gimple_omp_target_kind (ctx->stmt)
3573 == GF_OMP_TARGET_KIND_REGION)
3574 return true;
3575 break;
3576 default:
3577 break;
3579 break;
3580 case GIMPLE_OMP_MASTER:
3581 case GIMPLE_OMP_MASKED:
3582 for (; ctx != NULL; ctx = ctx->outer)
3583 switch (gimple_code (ctx->stmt))
3585 case GIMPLE_OMP_FOR:
3586 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3587 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3588 break;
3589 /* FALLTHRU */
3590 case GIMPLE_OMP_SECTIONS:
3591 case GIMPLE_OMP_SINGLE:
3592 case GIMPLE_OMP_TASK:
3593 error_at (gimple_location (stmt),
3594 "%qs region may not be closely nested inside "
3595 "of work-sharing, %<loop%>, explicit %<task%> or "
3596 "%<taskloop%> region",
3597 gimple_code (stmt) == GIMPLE_OMP_MASTER
3598 ? "master" : "masked");
3599 return false;
3600 case GIMPLE_OMP_PARALLEL:
3601 case GIMPLE_OMP_TEAMS:
3602 return true;
3603 case GIMPLE_OMP_TARGET:
3604 if (gimple_omp_target_kind (ctx->stmt)
3605 == GF_OMP_TARGET_KIND_REGION)
3606 return true;
3607 break;
3608 default:
3609 break;
3611 break;
3612 case GIMPLE_OMP_SCOPE:
3613 for (; ctx != NULL; ctx = ctx->outer)
3614 switch (gimple_code (ctx->stmt))
3616 case GIMPLE_OMP_FOR:
3617 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3618 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3619 break;
3620 /* FALLTHRU */
3621 case GIMPLE_OMP_SECTIONS:
3622 case GIMPLE_OMP_SINGLE:
3623 case GIMPLE_OMP_TASK:
3624 case GIMPLE_OMP_CRITICAL:
3625 case GIMPLE_OMP_ORDERED:
3626 case GIMPLE_OMP_MASTER:
3627 case GIMPLE_OMP_MASKED:
3628 error_at (gimple_location (stmt),
3629 "%<scope%> region may not be closely nested inside "
3630 "of work-sharing, %<loop%>, explicit %<task%>, "
3631 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3632 "or %<masked%> region");
3633 return false;
3634 case GIMPLE_OMP_PARALLEL:
3635 case GIMPLE_OMP_TEAMS:
3636 return true;
3637 case GIMPLE_OMP_TARGET:
3638 if (gimple_omp_target_kind (ctx->stmt)
3639 == GF_OMP_TARGET_KIND_REGION)
3640 return true;
3641 break;
3642 default:
3643 break;
3645 break;
3646 case GIMPLE_OMP_TASK:
3647 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3648 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
3650 enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
3651 error_at (OMP_CLAUSE_LOCATION (c),
3652 "%<%s(%s)%> is only allowed in %<omp ordered%>",
3653 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross",
3654 kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
3655 return false;
3657 break;
3658 case GIMPLE_OMP_ORDERED:
3659 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3660 c; c = OMP_CLAUSE_CHAIN (c))
3662 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS)
3664 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
3666 error_at (OMP_CLAUSE_LOCATION (c),
3667 "invalid depend kind in omp %<ordered%> %<depend%>");
3668 return false;
3670 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3671 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3672 continue;
3675 tree oclause;
3676 /* Look for containing ordered(N) loop. */
3677 if (ctx == NULL
3678 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3679 || (oclause
3680 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3681 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3683 error_at (OMP_CLAUSE_LOCATION (c),
3684 "%<ordered%> construct with %<depend%> clause "
3685 "must be closely nested inside an %<ordered%> loop");
3686 return false;
3689 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3690 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3692 /* ordered simd must be closely nested inside of simd region,
3693 and simd region must not encounter constructs other than
3694 ordered simd, therefore ordered simd may be either orphaned,
3695 or ctx->stmt must be simd. The latter case is handled already
3696 earlier. */
3697 if (ctx != NULL)
3699 error_at (gimple_location (stmt),
3700 "%<ordered%> %<simd%> must be closely nested inside "
3701 "%<simd%> region");
3702 return false;
3705 for (; ctx != NULL; ctx = ctx->outer)
3706 switch (gimple_code (ctx->stmt))
3708 case GIMPLE_OMP_CRITICAL:
3709 case GIMPLE_OMP_TASK:
3710 case GIMPLE_OMP_ORDERED:
3711 ordered_in_taskloop:
3712 error_at (gimple_location (stmt),
3713 "%<ordered%> region may not be closely nested inside "
3714 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3715 "%<taskloop%> region");
3716 return false;
3717 case GIMPLE_OMP_FOR:
3718 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3719 goto ordered_in_taskloop;
3720 tree o;
3721 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3722 OMP_CLAUSE_ORDERED);
3723 if (o == NULL)
3725 error_at (gimple_location (stmt),
3726 "%<ordered%> region must be closely nested inside "
3727 "a loop region with an %<ordered%> clause");
3728 return false;
3730 if (!gimple_omp_ordered_standalone_p (stmt))
3732 if (OMP_CLAUSE_ORDERED_DOACROSS (o))
3734 error_at (gimple_location (stmt),
3735 "%<ordered%> construct without %<doacross%> or "
3736 "%<depend%> clauses must not have the same "
3737 "binding region as %<ordered%> construct with "
3738 "those clauses");
3739 return false;
3741 else if (OMP_CLAUSE_ORDERED_EXPR (o))
3743 tree co
3744 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3745 OMP_CLAUSE_COLLAPSE);
3746 HOST_WIDE_INT
3747 o_n = tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (o));
3748 HOST_WIDE_INT c_n = 1;
3749 if (co)
3750 c_n = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (co));
3751 if (o_n != c_n)
3753 error_at (gimple_location (stmt),
3754 "%<ordered%> construct without %<doacross%> "
3755 "or %<depend%> clauses binds to loop where "
3756 "%<collapse%> argument %wd is different from "
3757 "%<ordered%> argument %wd", c_n, o_n);
3758 return false;
3762 return true;
3763 case GIMPLE_OMP_TARGET:
3764 if (gimple_omp_target_kind (ctx->stmt)
3765 != GF_OMP_TARGET_KIND_REGION)
3766 break;
3767 /* FALLTHRU */
3768 case GIMPLE_OMP_PARALLEL:
3769 case GIMPLE_OMP_TEAMS:
3770 error_at (gimple_location (stmt),
3771 "%<ordered%> region must be closely nested inside "
3772 "a loop region with an %<ordered%> clause");
3773 return false;
3774 default:
3775 break;
3777 break;
3778 case GIMPLE_OMP_CRITICAL:
3780 tree this_stmt_name
3781 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3782 for (; ctx != NULL; ctx = ctx->outer)
3783 if (gomp_critical *other_crit
3784 = dyn_cast <gomp_critical *> (ctx->stmt))
3785 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3787 error_at (gimple_location (stmt),
3788 "%<critical%> region may not be nested inside "
3789 "a %<critical%> region with the same name");
3790 return false;
3793 break;
3794 case GIMPLE_OMP_TEAMS:
3795 if (ctx == NULL)
3796 break;
3797 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3798 || (gimple_omp_target_kind (ctx->stmt)
3799 != GF_OMP_TARGET_KIND_REGION))
3801 /* Teams construct can appear either strictly nested inside of
3802 target construct with no intervening stmts, or can be encountered
3803 only by initial task (so must not appear inside any OpenMP
3804 construct. */
3805 error_at (gimple_location (stmt),
3806 "%<teams%> construct must be closely nested inside of "
3807 "%<target%> construct or not nested in any OpenMP "
3808 "construct");
3809 return false;
3811 break;
3812 case GIMPLE_OMP_TARGET:
3813 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3814 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
3816 enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
3817 error_at (OMP_CLAUSE_LOCATION (c),
3818 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3819 kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
3820 return false;
3822 if (is_gimple_omp_offloaded (stmt)
3823 && oacc_get_fn_attrib (cfun->decl) != NULL)
3825 error_at (gimple_location (stmt),
3826 "OpenACC region inside of OpenACC routine, nested "
3827 "parallelism not supported yet");
3828 return false;
3830 for (; ctx != NULL; ctx = ctx->outer)
3832 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3834 if (is_gimple_omp (stmt)
3835 && is_gimple_omp_oacc (stmt)
3836 && is_gimple_omp (ctx->stmt))
3838 error_at (gimple_location (stmt),
3839 "OpenACC construct inside of non-OpenACC region");
3840 return false;
3842 continue;
3845 const char *stmt_name, *ctx_stmt_name;
3846 switch (gimple_omp_target_kind (stmt))
3848 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3849 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3850 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3851 case GF_OMP_TARGET_KIND_ENTER_DATA:
3852 stmt_name = "target enter data"; break;
3853 case GF_OMP_TARGET_KIND_EXIT_DATA:
3854 stmt_name = "target exit data"; break;
3855 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3856 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3857 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3858 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3859 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3860 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
3861 stmt_name = "enter data"; break;
3862 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
3863 stmt_name = "exit data"; break;
3864 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3865 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3866 break;
3867 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3868 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3869 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3870 /* OpenACC 'kernels' decomposed parts. */
3871 stmt_name = "kernels"; break;
3872 default: gcc_unreachable ();
3874 switch (gimple_omp_target_kind (ctx->stmt))
3876 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3877 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3878 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3879 ctx_stmt_name = "parallel"; break;
3880 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3881 ctx_stmt_name = "kernels"; break;
3882 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3883 ctx_stmt_name = "serial"; break;
3884 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3885 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3886 ctx_stmt_name = "host_data"; break;
3887 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3888 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3889 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3890 /* OpenACC 'kernels' decomposed parts. */
3891 ctx_stmt_name = "kernels"; break;
3892 default: gcc_unreachable ();
3895 /* OpenACC/OpenMP mismatch? */
3896 if (is_gimple_omp_oacc (stmt)
3897 != is_gimple_omp_oacc (ctx->stmt))
3899 error_at (gimple_location (stmt),
3900 "%s %qs construct inside of %s %qs region",
3901 (is_gimple_omp_oacc (stmt)
3902 ? "OpenACC" : "OpenMP"), stmt_name,
3903 (is_gimple_omp_oacc (ctx->stmt)
3904 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3905 return false;
3907 if (is_gimple_omp_offloaded (ctx->stmt))
3909 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3910 if (is_gimple_omp_oacc (ctx->stmt))
3912 error_at (gimple_location (stmt),
3913 "%qs construct inside of %qs region",
3914 stmt_name, ctx_stmt_name);
3915 return false;
3917 else
3919 if ((gimple_omp_target_kind (ctx->stmt)
3920 == GF_OMP_TARGET_KIND_REGION)
3921 && (gimple_omp_target_kind (stmt)
3922 == GF_OMP_TARGET_KIND_REGION))
3924 c = omp_find_clause (gimple_omp_target_clauses (stmt),
3925 OMP_CLAUSE_DEVICE);
3926 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3927 break;
3929 warning_at (gimple_location (stmt), 0,
3930 "%qs construct inside of %qs region",
3931 stmt_name, ctx_stmt_name);
3935 break;
3936 default:
3937 break;
3939 return true;
3943 /* Helper function scan_omp.
3945 Callback for walk_tree or operators in walk_gimple_stmt used to
3946 scan for OMP directives in TP. */
3948 static tree
3949 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3951 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3952 omp_context *ctx = (omp_context *) wi->info;
3953 tree t = *tp;
3954 tree tmp;
3956 switch (TREE_CODE (t))
3958 case VAR_DECL:
3959 case PARM_DECL:
3960 case LABEL_DECL:
3961 case RESULT_DECL:
3962 if (ctx)
3964 tmp = NULL_TREE;
3965 if (TREE_CODE (t) == VAR_DECL
3966 && (tmp = lookup_attribute ("omp allocate var",
3967 DECL_ATTRIBUTES (t))) != NULL_TREE)
3968 t = TREE_VALUE (TREE_VALUE (tmp));
3969 tree repl = remap_decl (t, &ctx->cb);
3970 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3971 if (tmp != NULL_TREE && t != repl)
3972 *tp = build_fold_addr_expr (repl);
3973 else if (tmp == NULL_TREE)
3974 *tp = repl;
3976 break;
3978 case INDIRECT_REF:
3979 case MEM_REF:
3980 if (ctx
3981 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
3982 && ((tmp = lookup_attribute ("omp allocate var",
3983 DECL_ATTRIBUTES (TREE_OPERAND (t, 0))))
3984 != NULL_TREE))
3986 tmp = TREE_VALUE (TREE_VALUE (tmp));
3987 tree repl = remap_decl (tmp, &ctx->cb);
3988 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3989 if (tmp != repl)
3990 *tp = repl;
3991 break;
3993 gcc_fallthrough ();
3995 default:
3996 if (ctx && TYPE_P (t))
3997 *tp = remap_type (t, &ctx->cb);
3998 else if (!DECL_P (t))
4000 *walk_subtrees = 1;
4001 if (ctx)
4003 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
4004 if (tem != TREE_TYPE (t))
4006 if (TREE_CODE (t) == INTEGER_CST)
4007 *tp = wide_int_to_tree (tem, wi::to_wide (t));
4008 else
4009 TREE_TYPE (t) = tem;
4013 break;
4016 return NULL_TREE;
4019 /* Return true if FNDECL is a setjmp or a longjmp. */
4021 static bool
4022 setjmp_or_longjmp_p (const_tree fndecl)
4024 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP, BUILT_IN_LONGJMP))
4025 return true;
4027 tree declname = DECL_NAME (fndecl);
4028 if (!declname
4029 || (DECL_CONTEXT (fndecl) != NULL_TREE
4030 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
4031 || !TREE_PUBLIC (fndecl))
4032 return false;
4034 const char *name = IDENTIFIER_POINTER (declname);
4035 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
4038 /* Helper function for scan_omp.
4040 Callback for walk_gimple_stmt used to scan for OMP directives in
4041 the current statement in GSI. */
4043 static tree
4044 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
4045 struct walk_stmt_info *wi)
4047 gimple *stmt = gsi_stmt (*gsi);
4048 omp_context *ctx = (omp_context *) wi->info;
4050 if (gimple_has_location (stmt))
4051 input_location = gimple_location (stmt);
4053 /* Check the nesting restrictions. */
4054 bool remove = false;
4055 if (is_gimple_omp (stmt))
4056 remove = !check_omp_nesting_restrictions (stmt, ctx);
4057 else if (is_gimple_call (stmt))
4059 tree fndecl = gimple_call_fndecl (stmt);
4060 if (fndecl)
4062 if (ctx
4063 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4064 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
4065 && setjmp_or_longjmp_p (fndecl)
4066 && !ctx->loop_p)
4068 remove = true;
4069 error_at (gimple_location (stmt),
4070 "setjmp/longjmp inside %<simd%> construct");
4072 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4073 switch (DECL_FUNCTION_CODE (fndecl))
4075 case BUILT_IN_GOMP_BARRIER:
4076 case BUILT_IN_GOMP_CANCEL:
4077 case BUILT_IN_GOMP_CANCELLATION_POINT:
4078 case BUILT_IN_GOMP_TASKYIELD:
4079 case BUILT_IN_GOMP_TASKWAIT:
4080 case BUILT_IN_GOMP_TASKGROUP_START:
4081 case BUILT_IN_GOMP_TASKGROUP_END:
4082 remove = !check_omp_nesting_restrictions (stmt, ctx);
4083 break;
4084 default:
4085 break;
4087 else if (ctx)
4089 omp_context *octx = ctx;
4090 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
4091 octx = ctx->outer;
4092 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
4094 remove = true;
4095 error_at (gimple_location (stmt),
4096 "OpenMP runtime API call %qD in a region with "
4097 "%<order(concurrent)%> clause", fndecl);
4099 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4100 && omp_runtime_api_call (fndecl)
4101 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4102 != strlen ("omp_get_num_teams"))
4103 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4104 "omp_get_num_teams") != 0)
4105 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4106 != strlen ("omp_get_team_num"))
4107 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4108 "omp_get_team_num") != 0))
4110 remove = true;
4111 error_at (gimple_location (stmt),
4112 "OpenMP runtime API call %qD strictly nested in a "
4113 "%<teams%> region", fndecl);
4115 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
4116 && (gimple_omp_target_kind (ctx->stmt)
4117 == GF_OMP_TARGET_KIND_REGION)
4118 && omp_runtime_api_call (fndecl))
4120 tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
4121 tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
4122 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
4123 error_at (gimple_location (stmt),
4124 "OpenMP runtime API call %qD in a region with "
4125 "%<device(ancestor)%> clause", fndecl);
4130 if (remove)
4132 stmt = gimple_build_nop ();
4133 gsi_replace (gsi, stmt, false);
4136 *handled_ops_p = true;
4138 switch (gimple_code (stmt))
4140 case GIMPLE_OMP_PARALLEL:
4141 taskreg_nesting_level++;
4142 scan_omp_parallel (gsi, ctx);
4143 taskreg_nesting_level--;
4144 break;
4146 case GIMPLE_OMP_TASK:
4147 taskreg_nesting_level++;
4148 scan_omp_task (gsi, ctx);
4149 taskreg_nesting_level--;
4150 break;
4152 case GIMPLE_OMP_FOR:
4153 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4154 == GF_OMP_FOR_KIND_SIMD)
4155 && gimple_omp_for_combined_into_p (stmt)
4156 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
4158 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
4159 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
4160 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
4162 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
4163 break;
4166 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4167 == GF_OMP_FOR_KIND_SIMD)
4168 && omp_maybe_offloaded_ctx (ctx)
4169 && omp_max_simt_vf ()
4170 && gimple_omp_for_collapse (stmt) == 1)
4171 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
4172 else
4173 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
4174 break;
4176 case GIMPLE_OMP_SCOPE:
4177 ctx = new_omp_context (stmt, ctx);
4178 scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
4179 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4180 break;
4182 case GIMPLE_OMP_SECTIONS:
4183 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
4184 break;
4186 case GIMPLE_OMP_SINGLE:
4187 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
4188 break;
4190 case GIMPLE_OMP_SCAN:
4191 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
4193 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
4194 ctx->scan_inclusive = true;
4195 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
4196 ctx->scan_exclusive = true;
4198 /* FALLTHRU */
4199 case GIMPLE_OMP_SECTION:
4200 case GIMPLE_OMP_STRUCTURED_BLOCK:
4201 case GIMPLE_OMP_MASTER:
4202 case GIMPLE_OMP_ORDERED:
4203 case GIMPLE_OMP_CRITICAL:
4204 ctx = new_omp_context (stmt, ctx);
4205 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4206 break;
4208 case GIMPLE_OMP_MASKED:
4209 ctx = new_omp_context (stmt, ctx);
4210 scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
4211 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4212 break;
4214 case GIMPLE_OMP_TASKGROUP:
4215 ctx = new_omp_context (stmt, ctx);
4216 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
4217 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4218 break;
4220 case GIMPLE_OMP_TARGET:
4221 if (is_gimple_omp_offloaded (stmt))
4223 taskreg_nesting_level++;
4224 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4225 taskreg_nesting_level--;
4227 else
4228 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4229 break;
4231 case GIMPLE_OMP_TEAMS:
4232 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4234 taskreg_nesting_level++;
4235 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4236 taskreg_nesting_level--;
4238 else
4239 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4240 break;
4242 case GIMPLE_BIND:
4244 tree var;
4246 *handled_ops_p = false;
4247 if (ctx)
4248 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4249 var ;
4250 var = DECL_CHAIN (var))
4251 insert_decl_map (&ctx->cb, var, var);
4253 break;
4254 default:
4255 *handled_ops_p = false;
4256 break;
4259 return NULL_TREE;
4263 /* Scan all the statements starting at the current statement. CTX
4264 contains context information about the OMP directives and
4265 clauses found during the scan. */
4267 static void
4268 scan_omp (gimple_seq *body_p, omp_context *ctx)
4270 location_t saved_location;
4271 struct walk_stmt_info wi;
4273 memset (&wi, 0, sizeof (wi));
4274 wi.info = ctx;
4275 wi.want_locations = true;
4277 saved_location = input_location;
4278 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4279 input_location = saved_location;
4282 /* Re-gimplification and code generation routines. */
4284 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4285 of BIND if in a method. */
4287 static void
4288 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4290 if (DECL_ARGUMENTS (current_function_decl)
4291 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4292 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4293 == POINTER_TYPE))
4295 tree vars = gimple_bind_vars (bind);
4296 for (tree *pvar = &vars; *pvar; )
4297 if (omp_member_access_dummy_var (*pvar))
4298 *pvar = DECL_CHAIN (*pvar);
4299 else
4300 pvar = &DECL_CHAIN (*pvar);
4301 gimple_bind_set_vars (bind, vars);
4305 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4306 block and its subblocks. */
4308 static void
4309 remove_member_access_dummy_vars (tree block)
4311 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4312 if (omp_member_access_dummy_var (*pvar))
4313 *pvar = DECL_CHAIN (*pvar);
4314 else
4315 pvar = &DECL_CHAIN (*pvar);
4317 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4318 remove_member_access_dummy_vars (block);
4321 /* If a context was created for STMT when it was scanned, return it. */
4323 static omp_context *
4324 maybe_lookup_ctx (gimple *stmt)
4326 splay_tree_node n;
4327 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4328 return n ? (omp_context *) n->value : NULL;
4332 /* Find the mapping for DECL in CTX or the immediately enclosing
4333 context that has a mapping for DECL.
4335 If CTX is a nested parallel directive, we may have to use the decl
4336 mappings created in CTX's parent context. Suppose that we have the
4337 following parallel nesting (variable UIDs showed for clarity):
4339 iD.1562 = 0;
4340 #omp parallel shared(iD.1562) -> outer parallel
4341 iD.1562 = iD.1562 + 1;
4343 #omp parallel shared (iD.1562) -> inner parallel
4344 iD.1562 = iD.1562 - 1;
4346 Each parallel structure will create a distinct .omp_data_s structure
4347 for copying iD.1562 in/out of the directive:
4349 outer parallel .omp_data_s.1.i -> iD.1562
4350 inner parallel .omp_data_s.2.i -> iD.1562
4352 A shared variable mapping will produce a copy-out operation before
4353 the parallel directive and a copy-in operation after it. So, in
4354 this case we would have:
4356 iD.1562 = 0;
4357 .omp_data_o.1.i = iD.1562;
4358 #omp parallel shared(iD.1562) -> outer parallel
4359 .omp_data_i.1 = &.omp_data_o.1
4360 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4362 .omp_data_o.2.i = iD.1562; -> **
4363 #omp parallel shared(iD.1562) -> inner parallel
4364 .omp_data_i.2 = &.omp_data_o.2
4365 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4368 ** This is a problem. The symbol iD.1562 cannot be referenced
4369 inside the body of the outer parallel region. But since we are
4370 emitting this copy operation while expanding the inner parallel
4371 directive, we need to access the CTX structure of the outer
4372 parallel directive to get the correct mapping:
4374 .omp_data_o.2.i = .omp_data_i.1->i
4376 Since there may be other workshare or parallel directives enclosing
4377 the parallel directive, it may be necessary to walk up the context
4378 parent chain. This is not a problem in general because nested
4379 parallelism happens only rarely. */
4381 static tree
4382 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4384 tree t;
4385 omp_context *up;
4387 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4388 t = maybe_lookup_decl (decl, up);
4390 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4392 return t ? t : decl;
4396 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4397 in outer contexts. */
4399 static tree
4400 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4402 tree t = NULL;
4403 omp_context *up;
4405 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4406 t = maybe_lookup_decl (decl, up);
4408 return t ? t : decl;
4412 /* Construct the initialization value for reduction operation OP. */
4414 tree
4415 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4417 switch (op)
4419 case PLUS_EXPR:
4420 case MINUS_EXPR:
4421 case BIT_IOR_EXPR:
4422 case BIT_XOR_EXPR:
4423 case TRUTH_OR_EXPR:
4424 case TRUTH_ORIF_EXPR:
4425 case TRUTH_XOR_EXPR:
4426 case NE_EXPR:
4427 return build_zero_cst (type);
4429 case MULT_EXPR:
4430 case TRUTH_AND_EXPR:
4431 case TRUTH_ANDIF_EXPR:
4432 case EQ_EXPR:
4433 return fold_convert_loc (loc, type, integer_one_node);
4435 case BIT_AND_EXPR:
4436 return fold_convert_loc (loc, type, integer_minus_one_node);
4438 case MAX_EXPR:
4439 if (SCALAR_FLOAT_TYPE_P (type))
4441 REAL_VALUE_TYPE min;
4442 if (HONOR_INFINITIES (type))
4443 real_arithmetic (&min, NEGATE_EXPR, &dconstinf, NULL);
4444 else
4445 real_maxval (&min, 1, TYPE_MODE (type));
4446 return build_real (type, min);
4448 else if (POINTER_TYPE_P (type))
4450 wide_int min
4451 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4452 return wide_int_to_tree (type, min);
4454 else
4456 gcc_assert (INTEGRAL_TYPE_P (type));
4457 return TYPE_MIN_VALUE (type);
4460 case MIN_EXPR:
4461 if (SCALAR_FLOAT_TYPE_P (type))
4463 REAL_VALUE_TYPE max;
4464 if (HONOR_INFINITIES (type))
4465 max = dconstinf;
4466 else
4467 real_maxval (&max, 0, TYPE_MODE (type));
4468 return build_real (type, max);
4470 else if (POINTER_TYPE_P (type))
4472 wide_int max
4473 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4474 return wide_int_to_tree (type, max);
4476 else
4478 gcc_assert (INTEGRAL_TYPE_P (type));
4479 return TYPE_MAX_VALUE (type);
4482 default:
4483 gcc_unreachable ();
4487 /* Construct the initialization value for reduction CLAUSE. */
4489 tree
4490 omp_reduction_init (tree clause, tree type)
4492 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4493 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4496 /* Return alignment to be assumed for var in CLAUSE, which should be
4497 OMP_CLAUSE_ALIGNED. */
4499 static tree
4500 omp_clause_aligned_alignment (tree clause)
4502 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4503 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4505 /* Otherwise return implementation defined alignment. */
4506 unsigned int al = 1;
4507 opt_scalar_mode mode_iter;
4508 auto_vector_modes modes;
4509 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4510 static enum mode_class classes[]
4511 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4512 for (int i = 0; i < 4; i += 2)
4513 /* The for loop above dictates that we only walk through scalar classes. */
4514 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4516 scalar_mode mode = mode_iter.require ();
4517 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4518 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4519 continue;
4520 machine_mode alt_vmode;
4521 for (unsigned int j = 0; j < modes.length (); ++j)
4522 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4523 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4524 vmode = alt_vmode;
4526 tree type = lang_hooks.types.type_for_mode (mode, 1);
4527 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4528 continue;
4529 type = build_vector_type_for_mode (type, vmode);
4530 if (TYPE_MODE (type) != vmode)
4531 continue;
4532 if (TYPE_ALIGN_UNIT (type) > al)
4533 al = TYPE_ALIGN_UNIT (type);
4535 return build_int_cst (integer_type_node, al);
4539 /* This structure is part of the interface between lower_rec_simd_input_clauses
4540 and lower_rec_input_clauses. */
4542 class omplow_simd_context {
4543 public:
4544 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4545 tree idx;
4546 tree lane;
4547 tree lastlane;
4548 vec<tree, va_heap> simt_eargs;
4549 gimple_seq simt_dlist;
4550 poly_uint64 max_vf;
4551 bool is_simt;
4554 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4555 privatization. */
4557 static bool
4558 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4559 omplow_simd_context *sctx, tree &ivar,
4560 tree &lvar, tree *rvar = NULL,
4561 tree *rvar2 = NULL)
4563 if (known_eq (sctx->max_vf, 0U))
4565 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4566 if (maybe_gt (sctx->max_vf, 1U))
4568 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4569 OMP_CLAUSE_SAFELEN);
4570 if (c)
4572 poly_uint64 safe_len;
4573 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4574 || maybe_lt (safe_len, 1U))
4575 sctx->max_vf = 1;
4576 else
4577 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4580 if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
4582 for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
4583 c = OMP_CLAUSE_CHAIN (c))
4585 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4586 continue;
4588 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4590 /* UDR reductions are not supported yet for SIMT, disable
4591 SIMT. */
4592 sctx->max_vf = 1;
4593 break;
4596 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
4597 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
4599 /* Doing boolean operations on non-integral types is
4600 for conformance only, it's not worth supporting this
4601 for SIMT. */
4602 sctx->max_vf = 1;
4603 break;
4607 if (maybe_gt (sctx->max_vf, 1U))
4609 sctx->idx = create_tmp_var (unsigned_type_node);
4610 sctx->lane = create_tmp_var (unsigned_type_node);
4613 if (known_eq (sctx->max_vf, 1U))
4614 return false;
4616 if (sctx->is_simt)
4618 if (is_gimple_reg (new_var))
4620 ivar = lvar = new_var;
4621 return true;
4623 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4624 ivar = lvar = create_tmp_var (type);
4625 TREE_ADDRESSABLE (ivar) = 1;
4626 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4627 NULL, DECL_ATTRIBUTES (ivar));
4628 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4629 tree clobber = build_clobber (type);
4630 gimple *g = gimple_build_assign (ivar, clobber);
4631 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4633 else
4635 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4636 tree avar = create_tmp_var_raw (atype);
4637 if (TREE_ADDRESSABLE (new_var))
4638 TREE_ADDRESSABLE (avar) = 1;
4639 DECL_ATTRIBUTES (avar)
4640 = tree_cons (get_identifier ("omp simd array"), NULL,
4641 DECL_ATTRIBUTES (avar));
4642 gimple_add_tmp_var (avar);
4643 tree iavar = avar;
4644 if (rvar && !ctx->for_simd_scan_phase)
4646 /* For inscan reductions, create another array temporary,
4647 which will hold the reduced value. */
4648 iavar = create_tmp_var_raw (atype);
4649 if (TREE_ADDRESSABLE (new_var))
4650 TREE_ADDRESSABLE (iavar) = 1;
4651 DECL_ATTRIBUTES (iavar)
4652 = tree_cons (get_identifier ("omp simd array"), NULL,
4653 tree_cons (get_identifier ("omp simd inscan"), NULL,
4654 DECL_ATTRIBUTES (iavar)));
4655 gimple_add_tmp_var (iavar);
4656 ctx->cb.decl_map->put (avar, iavar);
4657 if (sctx->lastlane == NULL_TREE)
4658 sctx->lastlane = create_tmp_var (unsigned_type_node);
4659 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4660 sctx->lastlane, NULL_TREE, NULL_TREE);
4661 TREE_THIS_NOTRAP (*rvar) = 1;
4663 if (ctx->scan_exclusive)
4665 /* And for exclusive scan yet another one, which will
4666 hold the value during the scan phase. */
4667 tree savar = create_tmp_var_raw (atype);
4668 if (TREE_ADDRESSABLE (new_var))
4669 TREE_ADDRESSABLE (savar) = 1;
4670 DECL_ATTRIBUTES (savar)
4671 = tree_cons (get_identifier ("omp simd array"), NULL,
4672 tree_cons (get_identifier ("omp simd inscan "
4673 "exclusive"), NULL,
4674 DECL_ATTRIBUTES (savar)));
4675 gimple_add_tmp_var (savar);
4676 ctx->cb.decl_map->put (iavar, savar);
4677 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4678 sctx->idx, NULL_TREE, NULL_TREE);
4679 TREE_THIS_NOTRAP (*rvar2) = 1;
4682 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4683 NULL_TREE, NULL_TREE);
4684 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4685 NULL_TREE, NULL_TREE);
4686 TREE_THIS_NOTRAP (ivar) = 1;
4687 TREE_THIS_NOTRAP (lvar) = 1;
4689 if (DECL_P (new_var))
4691 SET_DECL_VALUE_EXPR (new_var, lvar);
4692 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4694 return true;
4697 /* Helper function of lower_rec_input_clauses. For a reference
4698 in simd reduction, add an underlying variable it will reference. */
4700 static void
4701 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4703 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4704 if (TREE_CONSTANT (z))
4706 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4707 get_name (new_vard));
4708 gimple_add_tmp_var (z);
4709 TREE_ADDRESSABLE (z) = 1;
4710 z = build_fold_addr_expr_loc (loc, z);
4711 gimplify_assign (new_vard, z, ilist);
4715 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4716 code to emit (type) (tskred_temp[idx]). */
4718 static tree
4719 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4720 unsigned idx)
4722 unsigned HOST_WIDE_INT sz
4723 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4724 tree r = build2 (MEM_REF, pointer_sized_int_node,
4725 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4726 idx * sz));
4727 tree v = create_tmp_var (pointer_sized_int_node);
4728 gimple *g = gimple_build_assign (v, r);
4729 gimple_seq_add_stmt (ilist, g);
4730 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4732 v = create_tmp_var (type);
4733 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4734 gimple_seq_add_stmt (ilist, g);
4736 return v;
4739 /* Lower early initialization of privatized variable NEW_VAR
4740 if it needs an allocator (has allocate clause). */
4742 static bool
4743 lower_private_allocate (tree var, tree new_var, tree &allocator,
4744 tree &allocate_ptr, gimple_seq *ilist,
4745 omp_context *ctx, bool is_ref, tree size)
4747 if (allocator)
4748 return false;
4749 gcc_assert (allocate_ptr == NULL_TREE);
4750 if (ctx->allocate_map
4751 && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
4752 if (tree *allocatorp = ctx->allocate_map->get (var))
4753 allocator = *allocatorp;
4754 if (allocator == NULL_TREE)
4755 return false;
4756 if (!is_ref && omp_privatize_by_reference (var))
4758 allocator = NULL_TREE;
4759 return false;
4762 unsigned HOST_WIDE_INT ialign = 0;
4763 if (TREE_CODE (allocator) == TREE_LIST)
4765 ialign = tree_to_uhwi (TREE_VALUE (allocator));
4766 allocator = TREE_PURPOSE (allocator);
4768 if (TREE_CODE (allocator) != INTEGER_CST)
4769 allocator = build_outer_var_ref (allocator, ctx, OMP_CLAUSE_ALLOCATE);
4770 allocator = fold_convert (pointer_sized_int_node, allocator);
4771 if (TREE_CODE (allocator) != INTEGER_CST)
4773 tree var = create_tmp_var (TREE_TYPE (allocator));
4774 gimplify_assign (var, allocator, ilist);
4775 allocator = var;
4778 tree ptr_type, align, sz = size;
4779 if (TYPE_P (new_var))
4781 ptr_type = build_pointer_type (new_var);
4782 ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
4784 else if (is_ref)
4786 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4787 ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4789 else
4791 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4792 ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
4793 if (sz == NULL_TREE)
4794 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
4796 align = build_int_cst (size_type_node, ialign);
4797 if (TREE_CODE (sz) != INTEGER_CST)
4799 tree szvar = create_tmp_var (size_type_node);
4800 gimplify_assign (szvar, sz, ilist);
4801 sz = szvar;
4803 allocate_ptr = create_tmp_var (ptr_type);
4804 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4805 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4806 gimple_call_set_lhs (g, allocate_ptr);
4807 gimple_seq_add_stmt (ilist, g);
4808 if (!is_ref)
4810 tree x = build_simple_mem_ref (allocate_ptr);
4811 TREE_THIS_NOTRAP (x) = 1;
4812 SET_DECL_VALUE_EXPR (new_var, x);
4813 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4815 return true;
4818 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4819 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4820 private variables. Initialization statements go in ILIST, while calls
4821 to destructors go in DLIST. */
4823 static void
4824 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4825 omp_context *ctx, struct omp_for_data *fd)
4827 tree c, copyin_seq, x, ptr;
4828 bool copyin_by_ref = false;
4829 bool lastprivate_firstprivate = false;
4830 bool reduction_omp_orig_ref = false;
4831 int pass;
4832 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4833 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4834 omplow_simd_context sctx = omplow_simd_context ();
4835 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4836 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4837 gimple_seq llist[4] = { };
4838 tree nonconst_simd_if = NULL_TREE;
4840 copyin_seq = NULL;
4841 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4843 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4844 with data sharing clauses referencing variable sized vars. That
4845 is unnecessarily hard to support and very unlikely to result in
4846 vectorized code anyway. */
4847 if (is_simd)
4848 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4849 switch (OMP_CLAUSE_CODE (c))
4851 case OMP_CLAUSE_LINEAR:
4852 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4853 sctx.max_vf = 1;
4854 /* FALLTHRU */
4855 case OMP_CLAUSE_PRIVATE:
4856 case OMP_CLAUSE_FIRSTPRIVATE:
4857 case OMP_CLAUSE_LASTPRIVATE:
4858 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4859 sctx.max_vf = 1;
4860 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4862 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4863 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4864 sctx.max_vf = 1;
4866 break;
4867 case OMP_CLAUSE_REDUCTION:
4868 case OMP_CLAUSE_IN_REDUCTION:
4869 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4870 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4871 sctx.max_vf = 1;
4872 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4874 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4875 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4876 sctx.max_vf = 1;
4878 break;
4879 case OMP_CLAUSE_IF:
4880 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4881 sctx.max_vf = 1;
4882 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4883 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4884 break;
4885 case OMP_CLAUSE_SIMDLEN:
4886 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4887 sctx.max_vf = 1;
4888 break;
4889 case OMP_CLAUSE__CONDTEMP_:
4890 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4891 if (sctx.is_simt)
4892 sctx.max_vf = 1;
4893 break;
4894 default:
4895 continue;
4898 /* Add a placeholder for simduid. */
4899 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4900 sctx.simt_eargs.safe_push (NULL_TREE);
4902 unsigned task_reduction_cnt = 0;
4903 unsigned task_reduction_cntorig = 0;
4904 unsigned task_reduction_cnt_full = 0;
4905 unsigned task_reduction_cntorig_full = 0;
4906 unsigned task_reduction_other_cnt = 0;
4907 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4908 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4909 /* Do all the fixed sized types in the first pass, and the variable sized
4910 types in the second pass. This makes sure that the scalar arguments to
4911 the variable sized types are processed before we use them in the
4912 variable sized operations. For task reductions we use 4 passes, in the
4913 first two we ignore them, in the third one gather arguments for
4914 GOMP_task_reduction_remap call and in the last pass actually handle
4915 the task reductions. */
4916 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4917 ? 4 : 2); ++pass)
4919 if (pass == 2 && task_reduction_cnt)
4921 tskred_atype
4922 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4923 + task_reduction_cntorig);
4924 tskred_avar = create_tmp_var_raw (tskred_atype);
4925 gimple_add_tmp_var (tskred_avar);
4926 TREE_ADDRESSABLE (tskred_avar) = 1;
4927 task_reduction_cnt_full = task_reduction_cnt;
4928 task_reduction_cntorig_full = task_reduction_cntorig;
4930 else if (pass == 3 && task_reduction_cnt)
4932 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4933 gimple *g
4934 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4935 size_int (task_reduction_cntorig),
4936 build_fold_addr_expr (tskred_avar));
4937 gimple_seq_add_stmt (ilist, g);
4939 if (pass == 3 && task_reduction_other_cnt)
4941 /* For reduction clauses, build
4942 tskred_base = (void *) tskred_temp[2]
4943 + omp_get_thread_num () * tskred_temp[1]
4944 or if tskred_temp[1] is known to be constant, that constant
4945 directly. This is the start of the private reduction copy block
4946 for the current thread. */
4947 tree v = create_tmp_var (integer_type_node);
4948 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4949 gimple *g = gimple_build_call (x, 0);
4950 gimple_call_set_lhs (g, v);
4951 gimple_seq_add_stmt (ilist, g);
4952 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4953 tskred_temp = OMP_CLAUSE_DECL (c);
4954 if (is_taskreg_ctx (ctx))
4955 tskred_temp = lookup_decl (tskred_temp, ctx);
4956 tree v2 = create_tmp_var (sizetype);
4957 g = gimple_build_assign (v2, NOP_EXPR, v);
4958 gimple_seq_add_stmt (ilist, g);
4959 if (ctx->task_reductions[0])
4960 v = fold_convert (sizetype, ctx->task_reductions[0]);
4961 else
4962 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4963 tree v3 = create_tmp_var (sizetype);
4964 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4965 gimple_seq_add_stmt (ilist, g);
4966 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4967 tskred_base = create_tmp_var (ptr_type_node);
4968 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4969 gimple_seq_add_stmt (ilist, g);
4971 task_reduction_cnt = 0;
4972 task_reduction_cntorig = 0;
4973 task_reduction_other_cnt = 0;
4974 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4976 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4977 tree var, new_var;
4978 bool by_ref;
4979 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4980 bool task_reduction_p = false;
4981 bool task_reduction_needs_orig_p = false;
4982 tree cond = NULL_TREE;
4983 tree allocator, allocate_ptr;
4985 switch (c_kind)
4987 case OMP_CLAUSE_PRIVATE:
4988 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4989 continue;
4990 break;
4991 case OMP_CLAUSE_SHARED:
4992 /* Ignore shared directives in teams construct inside
4993 of target construct. */
4994 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4995 && !is_host_teams_ctx (ctx))
4996 continue;
4997 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4999 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
5000 || is_global_var (OMP_CLAUSE_DECL (c)));
5001 continue;
5003 case OMP_CLAUSE_FIRSTPRIVATE:
5004 case OMP_CLAUSE_COPYIN:
5005 break;
5006 case OMP_CLAUSE_LINEAR:
5007 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
5008 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5009 lastprivate_firstprivate = true;
5010 break;
5011 case OMP_CLAUSE_REDUCTION:
5012 case OMP_CLAUSE_IN_REDUCTION:
5013 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5014 || is_task_ctx (ctx)
5015 || OMP_CLAUSE_REDUCTION_TASK (c))
5017 task_reduction_p = true;
5018 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5020 task_reduction_other_cnt++;
5021 if (pass == 2)
5022 continue;
5024 else
5025 task_reduction_cnt++;
5026 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5028 var = OMP_CLAUSE_DECL (c);
5029 /* If var is a global variable that isn't privatized
5030 in outer contexts, we don't need to look up the
5031 original address, it is always the address of the
5032 global variable itself. */
5033 if (!DECL_P (var)
5034 || omp_privatize_by_reference (var)
5035 || !is_global_var
5036 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5038 task_reduction_needs_orig_p = true;
5039 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5040 task_reduction_cntorig++;
5044 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5045 reduction_omp_orig_ref = true;
5046 break;
5047 case OMP_CLAUSE__REDUCTEMP_:
5048 if (!is_taskreg_ctx (ctx))
5049 continue;
5050 /* FALLTHRU */
5051 case OMP_CLAUSE__LOOPTEMP_:
5052 /* Handle _looptemp_/_reductemp_ clauses only on
5053 parallel/task. */
5054 if (fd)
5055 continue;
5056 break;
5057 case OMP_CLAUSE_LASTPRIVATE:
5058 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5060 lastprivate_firstprivate = true;
5061 if (pass != 0 || is_taskloop_ctx (ctx))
5062 continue;
5064 /* Even without corresponding firstprivate, if
5065 decl is Fortran allocatable, it needs outer var
5066 reference. */
5067 else if (pass == 0
5068 && lang_hooks.decls.omp_private_outer_ref
5069 (OMP_CLAUSE_DECL (c)))
5070 lastprivate_firstprivate = true;
5071 break;
5072 case OMP_CLAUSE_ALIGNED:
5073 if (pass != 1)
5074 continue;
5075 var = OMP_CLAUSE_DECL (c);
5076 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5077 && !is_global_var (var))
5079 new_var = maybe_lookup_decl (var, ctx);
5080 if (new_var == NULL_TREE)
5081 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
5082 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5083 tree alarg = omp_clause_aligned_alignment (c);
5084 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5085 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5086 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5087 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5088 gimplify_and_add (x, ilist);
5090 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5091 && is_global_var (var))
5093 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5094 new_var = lookup_decl (var, ctx);
5095 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
5096 t = build_fold_addr_expr_loc (clause_loc, t);
5097 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5098 tree alarg = omp_clause_aligned_alignment (c);
5099 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5100 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5101 t = fold_convert_loc (clause_loc, ptype, t);
5102 x = create_tmp_var (ptype);
5103 t = build2 (MODIFY_EXPR, ptype, x, t);
5104 gimplify_and_add (t, ilist);
5105 t = build_simple_mem_ref_loc (clause_loc, x);
5106 SET_DECL_VALUE_EXPR (new_var, t);
5107 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5109 continue;
5110 case OMP_CLAUSE__CONDTEMP_:
5111 if (is_parallel_ctx (ctx)
5112 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5113 break;
5114 continue;
5115 default:
5116 continue;
5119 if (task_reduction_p != (pass >= 2))
5120 continue;
5122 allocator = NULL_TREE;
5123 allocate_ptr = NULL_TREE;
5124 new_var = var = OMP_CLAUSE_DECL (c);
5125 if ((c_kind == OMP_CLAUSE_REDUCTION
5126 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5127 && TREE_CODE (var) == MEM_REF)
5129 var = TREE_OPERAND (var, 0);
5130 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5131 var = TREE_OPERAND (var, 0);
5132 if (TREE_CODE (var) == INDIRECT_REF
5133 || TREE_CODE (var) == ADDR_EXPR)
5134 var = TREE_OPERAND (var, 0);
5135 if (is_variable_sized (var))
5137 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5138 var = DECL_VALUE_EXPR (var);
5139 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5140 var = TREE_OPERAND (var, 0);
5141 gcc_assert (DECL_P (var));
5143 new_var = var;
5145 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5147 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5148 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5150 else if (c_kind != OMP_CLAUSE_COPYIN)
5151 new_var = lookup_decl (var, ctx);
5153 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5155 if (pass != 0)
5156 continue;
5158 /* C/C++ array section reductions. */
5159 else if ((c_kind == OMP_CLAUSE_REDUCTION
5160 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5161 && var != OMP_CLAUSE_DECL (c))
5163 if (pass == 0)
5164 continue;
5166 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5167 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5169 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5171 tree b = TREE_OPERAND (orig_var, 1);
5172 if (is_omp_target (ctx->stmt))
5173 b = NULL_TREE;
5174 else
5175 b = maybe_lookup_decl (b, ctx);
5176 if (b == NULL)
5178 b = TREE_OPERAND (orig_var, 1);
5179 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5181 if (integer_zerop (bias))
5182 bias = b;
5183 else
5185 bias = fold_convert_loc (clause_loc,
5186 TREE_TYPE (b), bias);
5187 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5188 TREE_TYPE (b), b, bias);
5190 orig_var = TREE_OPERAND (orig_var, 0);
5192 if (pass == 2)
5194 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5195 if (is_global_var (out)
5196 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5197 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5198 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5199 != POINTER_TYPE)))
5200 x = var;
5201 else if (is_omp_target (ctx->stmt))
5202 x = out;
5203 else
5205 bool by_ref = use_pointer_for_field (var, NULL);
5206 x = build_receiver_ref (var, by_ref, ctx);
5207 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5208 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5209 == POINTER_TYPE))
5210 x = build_fold_addr_expr (x);
5212 if (TREE_CODE (orig_var) == INDIRECT_REF)
5213 x = build_simple_mem_ref (x);
5214 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5216 if (var == TREE_OPERAND (orig_var, 0))
5217 x = build_fold_addr_expr (x);
5219 bias = fold_convert (sizetype, bias);
5220 x = fold_convert (ptr_type_node, x);
5221 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5222 TREE_TYPE (x), x, bias);
5223 unsigned cnt = task_reduction_cnt - 1;
5224 if (!task_reduction_needs_orig_p)
5225 cnt += (task_reduction_cntorig_full
5226 - task_reduction_cntorig);
5227 else
5228 cnt = task_reduction_cntorig - 1;
5229 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5230 size_int (cnt), NULL_TREE, NULL_TREE);
5231 gimplify_assign (r, x, ilist);
5232 continue;
5235 if (TREE_CODE (orig_var) == INDIRECT_REF
5236 || TREE_CODE (orig_var) == ADDR_EXPR)
5237 orig_var = TREE_OPERAND (orig_var, 0);
5238 tree d = OMP_CLAUSE_DECL (c);
5239 tree type = TREE_TYPE (d);
5240 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5241 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5242 tree sz = v;
5243 const char *name = get_name (orig_var);
5244 if (pass != 3 && !TREE_CONSTANT (v))
5246 tree t;
5247 if (is_omp_target (ctx->stmt))
5248 t = NULL_TREE;
5249 else
5250 t = maybe_lookup_decl (v, ctx);
5251 if (t)
5252 v = t;
5253 else
5254 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5255 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5256 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5257 TREE_TYPE (v), v,
5258 build_int_cst (TREE_TYPE (v), 1));
5259 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5260 TREE_TYPE (v), t,
5261 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5263 if (pass == 3)
5265 tree xv = create_tmp_var (ptr_type_node);
5266 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5268 unsigned cnt = task_reduction_cnt - 1;
5269 if (!task_reduction_needs_orig_p)
5270 cnt += (task_reduction_cntorig_full
5271 - task_reduction_cntorig);
5272 else
5273 cnt = task_reduction_cntorig - 1;
5274 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5275 size_int (cnt), NULL_TREE, NULL_TREE);
5277 gimple *g = gimple_build_assign (xv, x);
5278 gimple_seq_add_stmt (ilist, g);
5280 else
5282 unsigned int idx = *ctx->task_reduction_map->get (c);
5283 tree off;
5284 if (ctx->task_reductions[1 + idx])
5285 off = fold_convert (sizetype,
5286 ctx->task_reductions[1 + idx]);
5287 else
5288 off = task_reduction_read (ilist, tskred_temp, sizetype,
5289 7 + 3 * idx + 1);
5290 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5291 tskred_base, off);
5292 gimple_seq_add_stmt (ilist, g);
5294 x = fold_convert (build_pointer_type (boolean_type_node),
5295 xv);
5296 if (TREE_CONSTANT (v))
5297 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5298 TYPE_SIZE_UNIT (type));
5299 else
5301 tree t;
5302 if (is_omp_target (ctx->stmt))
5303 t = NULL_TREE;
5304 else
5305 t = maybe_lookup_decl (v, ctx);
5306 if (t)
5307 v = t;
5308 else
5309 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5310 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5311 fb_rvalue);
5312 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5313 TREE_TYPE (v), v,
5314 build_int_cst (TREE_TYPE (v), 1));
5315 t = fold_build2_loc (clause_loc, MULT_EXPR,
5316 TREE_TYPE (v), t,
5317 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5318 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5320 cond = create_tmp_var (TREE_TYPE (x));
5321 gimplify_assign (cond, x, ilist);
5322 x = xv;
5324 else if (lower_private_allocate (var, type, allocator,
5325 allocate_ptr, ilist, ctx,
5326 true,
5327 TREE_CONSTANT (v)
5328 ? TYPE_SIZE_UNIT (type)
5329 : sz))
5330 x = allocate_ptr;
5331 else if (TREE_CONSTANT (v))
5333 x = create_tmp_var_raw (type, name);
5334 gimple_add_tmp_var (x);
5335 TREE_ADDRESSABLE (x) = 1;
5336 x = build_fold_addr_expr_loc (clause_loc, x);
5338 else
5340 tree atmp
5341 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5342 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5343 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5346 tree ptype = build_pointer_type (TREE_TYPE (type));
5347 x = fold_convert_loc (clause_loc, ptype, x);
5348 tree y = create_tmp_var (ptype, name);
5349 gimplify_assign (y, x, ilist);
5350 x = y;
5351 tree yb = y;
5353 if (!integer_zerop (bias))
5355 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5356 bias);
5357 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5359 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5360 pointer_sized_int_node, yb, bias);
5361 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5362 yb = create_tmp_var (ptype, name);
5363 gimplify_assign (yb, x, ilist);
5364 x = yb;
5367 d = TREE_OPERAND (d, 0);
5368 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5369 d = TREE_OPERAND (d, 0);
5370 if (TREE_CODE (d) == ADDR_EXPR)
5372 if (orig_var != var)
5374 gcc_assert (is_variable_sized (orig_var));
5375 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5377 gimplify_assign (new_var, x, ilist);
5378 tree new_orig_var = lookup_decl (orig_var, ctx);
5379 tree t = build_fold_indirect_ref (new_var);
5380 DECL_IGNORED_P (new_var) = 0;
5381 TREE_THIS_NOTRAP (t) = 1;
5382 SET_DECL_VALUE_EXPR (new_orig_var, t);
5383 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5385 else
5387 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5388 build_int_cst (ptype, 0));
5389 SET_DECL_VALUE_EXPR (new_var, x);
5390 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5393 else
5395 gcc_assert (orig_var == var);
5396 if (TREE_CODE (d) == INDIRECT_REF)
5398 x = create_tmp_var (ptype, name);
5399 TREE_ADDRESSABLE (x) = 1;
5400 gimplify_assign (x, yb, ilist);
5401 x = build_fold_addr_expr_loc (clause_loc, x);
5403 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5404 gimplify_assign (new_var, x, ilist);
5406 /* GOMP_taskgroup_reduction_register memsets the whole
5407 array to zero. If the initializer is zero, we don't
5408 need to initialize it again, just mark it as ever
5409 used unconditionally, i.e. cond = true. */
5410 if (cond
5411 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5412 && initializer_zerop (omp_reduction_init (c,
5413 TREE_TYPE (type))))
5415 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5416 boolean_true_node);
5417 gimple_seq_add_stmt (ilist, g);
5418 continue;
5420 tree end = create_artificial_label (UNKNOWN_LOCATION);
5421 if (cond)
5423 gimple *g;
5424 if (!is_parallel_ctx (ctx))
5426 tree condv = create_tmp_var (boolean_type_node);
5427 g = gimple_build_assign (condv,
5428 build_simple_mem_ref (cond));
5429 gimple_seq_add_stmt (ilist, g);
5430 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5431 g = gimple_build_cond (NE_EXPR, condv,
5432 boolean_false_node, end, lab1);
5433 gimple_seq_add_stmt (ilist, g);
5434 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5436 g = gimple_build_assign (build_simple_mem_ref (cond),
5437 boolean_true_node);
5438 gimple_seq_add_stmt (ilist, g);
5441 tree y1 = create_tmp_var (ptype);
5442 gimplify_assign (y1, y, ilist);
5443 tree i2 = NULL_TREE, y2 = NULL_TREE;
5444 tree body2 = NULL_TREE, end2 = NULL_TREE;
5445 tree y3 = NULL_TREE, y4 = NULL_TREE;
5446 if (task_reduction_needs_orig_p)
5448 y3 = create_tmp_var (ptype);
5449 tree ref;
5450 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5451 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5452 size_int (task_reduction_cnt_full
5453 + task_reduction_cntorig - 1),
5454 NULL_TREE, NULL_TREE);
5455 else
5457 unsigned int idx = *ctx->task_reduction_map->get (c);
5458 ref = task_reduction_read (ilist, tskred_temp, ptype,
5459 7 + 3 * idx);
5461 gimplify_assign (y3, ref, ilist);
5463 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5465 if (pass != 3)
5467 y2 = create_tmp_var (ptype);
5468 gimplify_assign (y2, y, ilist);
5470 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5472 tree ref = build_outer_var_ref (var, ctx);
5473 /* For ref build_outer_var_ref already performs this. */
5474 if (TREE_CODE (d) == INDIRECT_REF)
5475 gcc_assert (omp_privatize_by_reference (var));
5476 else if (TREE_CODE (d) == ADDR_EXPR)
5477 ref = build_fold_addr_expr (ref);
5478 else if (omp_privatize_by_reference (var))
5479 ref = build_fold_addr_expr (ref);
5480 ref = fold_convert_loc (clause_loc, ptype, ref);
5481 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5482 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5484 y3 = create_tmp_var (ptype);
5485 gimplify_assign (y3, unshare_expr (ref), ilist);
5487 if (is_simd)
5489 y4 = create_tmp_var (ptype);
5490 gimplify_assign (y4, ref, dlist);
5494 tree i = create_tmp_var (TREE_TYPE (v));
5495 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5496 tree body = create_artificial_label (UNKNOWN_LOCATION);
5497 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5498 if (y2)
5500 i2 = create_tmp_var (TREE_TYPE (v));
5501 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5502 body2 = create_artificial_label (UNKNOWN_LOCATION);
5503 end2 = create_artificial_label (UNKNOWN_LOCATION);
5504 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5506 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5508 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5509 tree decl_placeholder
5510 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5511 SET_DECL_VALUE_EXPR (decl_placeholder,
5512 build_simple_mem_ref (y1));
5513 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5514 SET_DECL_VALUE_EXPR (placeholder,
5515 y3 ? build_simple_mem_ref (y3)
5516 : error_mark_node);
5517 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5518 x = lang_hooks.decls.omp_clause_default_ctor
5519 (c, build_simple_mem_ref (y1),
5520 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5521 if (x)
5522 gimplify_and_add (x, ilist);
5523 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5525 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5526 lower_omp (&tseq, ctx);
5527 gimple_seq_add_seq (ilist, tseq);
5529 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5530 if (is_simd)
5532 SET_DECL_VALUE_EXPR (decl_placeholder,
5533 build_simple_mem_ref (y2));
5534 SET_DECL_VALUE_EXPR (placeholder,
5535 build_simple_mem_ref (y4));
5536 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5537 lower_omp (&tseq, ctx);
5538 gimple_seq_add_seq (dlist, tseq);
5539 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5541 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5542 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5543 if (y2)
5545 x = lang_hooks.decls.omp_clause_dtor
5546 (c, build_simple_mem_ref (y2));
5547 if (x)
5548 gimplify_and_add (x, dlist);
5551 else
5553 x = omp_reduction_init (c, TREE_TYPE (type));
5554 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5556 /* reduction(-:var) sums up the partial results, so it
5557 acts identically to reduction(+:var). */
5558 if (code == MINUS_EXPR)
5559 code = PLUS_EXPR;
5561 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5562 if (is_simd)
5564 x = build2 (code, TREE_TYPE (type),
5565 build_simple_mem_ref (y4),
5566 build_simple_mem_ref (y2));
5567 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5570 gimple *g
5571 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5572 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5573 gimple_seq_add_stmt (ilist, g);
5574 if (y3)
5576 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5577 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5578 gimple_seq_add_stmt (ilist, g);
5580 g = gimple_build_assign (i, PLUS_EXPR, i,
5581 build_int_cst (TREE_TYPE (i), 1));
5582 gimple_seq_add_stmt (ilist, g);
5583 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5584 gimple_seq_add_stmt (ilist, g);
5585 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5586 if (y2)
5588 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5589 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5590 gimple_seq_add_stmt (dlist, g);
5591 if (y4)
5593 g = gimple_build_assign
5594 (y4, POINTER_PLUS_EXPR, y4,
5595 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5596 gimple_seq_add_stmt (dlist, g);
5598 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5599 build_int_cst (TREE_TYPE (i2), 1));
5600 gimple_seq_add_stmt (dlist, g);
5601 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5602 gimple_seq_add_stmt (dlist, g);
5603 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5605 if (allocator)
5607 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5608 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5609 gimple_seq_add_stmt (dlist, g);
5611 continue;
5613 else if (pass == 2)
5615 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5616 if (is_global_var (out))
5617 x = var;
5618 else if (is_omp_target (ctx->stmt))
5619 x = out;
5620 else
5622 bool by_ref = use_pointer_for_field (var, ctx);
5623 x = build_receiver_ref (var, by_ref, ctx);
5625 if (!omp_privatize_by_reference (var))
5626 x = build_fold_addr_expr (x);
5627 x = fold_convert (ptr_type_node, x);
5628 unsigned cnt = task_reduction_cnt - 1;
5629 if (!task_reduction_needs_orig_p)
5630 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5631 else
5632 cnt = task_reduction_cntorig - 1;
5633 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5634 size_int (cnt), NULL_TREE, NULL_TREE);
5635 gimplify_assign (r, x, ilist);
5636 continue;
5638 else if (pass == 3)
5640 tree type = TREE_TYPE (new_var);
5641 if (!omp_privatize_by_reference (var))
5642 type = build_pointer_type (type);
5643 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5645 unsigned cnt = task_reduction_cnt - 1;
5646 if (!task_reduction_needs_orig_p)
5647 cnt += (task_reduction_cntorig_full
5648 - task_reduction_cntorig);
5649 else
5650 cnt = task_reduction_cntorig - 1;
5651 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5652 size_int (cnt), NULL_TREE, NULL_TREE);
5654 else
5656 unsigned int idx = *ctx->task_reduction_map->get (c);
5657 tree off;
5658 if (ctx->task_reductions[1 + idx])
5659 off = fold_convert (sizetype,
5660 ctx->task_reductions[1 + idx]);
5661 else
5662 off = task_reduction_read (ilist, tskred_temp, sizetype,
5663 7 + 3 * idx + 1);
5664 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5665 tskred_base, off);
5667 x = fold_convert (type, x);
5668 tree t;
5669 if (omp_privatize_by_reference (var))
5671 gimplify_assign (new_var, x, ilist);
5672 t = new_var;
5673 new_var = build_simple_mem_ref (new_var);
5675 else
5677 t = create_tmp_var (type);
5678 gimplify_assign (t, x, ilist);
5679 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5680 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5682 t = fold_convert (build_pointer_type (boolean_type_node), t);
5683 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5684 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5685 cond = create_tmp_var (TREE_TYPE (t));
5686 gimplify_assign (cond, t, ilist);
5688 else if (is_variable_sized (var))
5690 /* For variable sized types, we need to allocate the
5691 actual storage here. Call alloca and store the
5692 result in the pointer decl that we created elsewhere. */
5693 if (pass == 0)
5694 continue;
5696 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5698 tree tmp;
5700 ptr = DECL_VALUE_EXPR (new_var);
5701 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5702 ptr = TREE_OPERAND (ptr, 0);
5703 gcc_assert (DECL_P (ptr));
5704 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5706 if (lower_private_allocate (var, new_var, allocator,
5707 allocate_ptr, ilist, ctx,
5708 false, x))
5709 tmp = allocate_ptr;
5710 else
5712 /* void *tmp = __builtin_alloca */
5713 tree atmp
5714 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5715 gcall *stmt
5716 = gimple_build_call (atmp, 2, x,
5717 size_int (DECL_ALIGN (var)));
5718 cfun->calls_alloca = 1;
5719 tmp = create_tmp_var_raw (ptr_type_node);
5720 gimple_add_tmp_var (tmp);
5721 gimple_call_set_lhs (stmt, tmp);
5723 gimple_seq_add_stmt (ilist, stmt);
5726 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5727 gimplify_assign (ptr, x, ilist);
5730 else if (omp_privatize_by_reference (var)
5731 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5732 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5734 /* For references that are being privatized for Fortran,
5735 allocate new backing storage for the new pointer
5736 variable. This allows us to avoid changing all the
5737 code that expects a pointer to something that expects
5738 a direct variable. */
5739 if (pass == 0)
5740 continue;
5742 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5743 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5745 x = build_receiver_ref (var, false, ctx);
5746 if (ctx->allocate_map)
5747 if (tree *allocatep = ctx->allocate_map->get (var))
5749 allocator = *allocatep;
5750 if (TREE_CODE (allocator) == TREE_LIST)
5751 allocator = TREE_PURPOSE (allocator);
5752 if (TREE_CODE (allocator) != INTEGER_CST)
5753 allocator = build_outer_var_ref (allocator, ctx);
5754 allocator = fold_convert (pointer_sized_int_node,
5755 allocator);
5756 allocate_ptr = unshare_expr (x);
5758 if (allocator == NULL_TREE)
5759 x = build_fold_addr_expr_loc (clause_loc, x);
5761 else if (lower_private_allocate (var, new_var, allocator,
5762 allocate_ptr,
5763 ilist, ctx, true, x))
5764 x = allocate_ptr;
5765 else if (TREE_CONSTANT (x))
5767 /* For reduction in SIMD loop, defer adding the
5768 initialization of the reference, because if we decide
5769 to use SIMD array for it, the initilization could cause
5770 expansion ICE. Ditto for other privatization clauses. */
5771 if (is_simd)
5772 x = NULL_TREE;
5773 else
5775 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5776 get_name (var));
5777 gimple_add_tmp_var (x);
5778 TREE_ADDRESSABLE (x) = 1;
5779 x = build_fold_addr_expr_loc (clause_loc, x);
5782 else
5784 tree atmp
5785 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5786 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5787 tree al = size_int (TYPE_ALIGN (rtype));
5788 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5791 if (x)
5793 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5794 gimplify_assign (new_var, x, ilist);
5797 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5799 else if ((c_kind == OMP_CLAUSE_REDUCTION
5800 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5801 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5803 if (pass == 0)
5804 continue;
5806 else if (pass != 0)
5807 continue;
5809 switch (OMP_CLAUSE_CODE (c))
5811 case OMP_CLAUSE_SHARED:
5812 /* Ignore shared directives in teams construct inside
5813 target construct. */
5814 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5815 && !is_host_teams_ctx (ctx))
5816 continue;
5817 /* Shared global vars are just accessed directly. */
5818 if (is_global_var (new_var))
5819 break;
5820 /* For taskloop firstprivate/lastprivate, represented
5821 as firstprivate and shared clause on the task, new_var
5822 is the firstprivate var. */
5823 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5824 break;
5825 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5826 needs to be delayed until after fixup_child_record_type so
5827 that we get the correct type during the dereference. */
5828 by_ref = use_pointer_for_field (var, ctx);
5829 x = build_receiver_ref (var, by_ref, ctx);
5830 SET_DECL_VALUE_EXPR (new_var, x);
5831 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5833 /* ??? If VAR is not passed by reference, and the variable
5834 hasn't been initialized yet, then we'll get a warning for
5835 the store into the omp_data_s structure. Ideally, we'd be
5836 able to notice this and not store anything at all, but
5837 we're generating code too early. Suppress the warning. */
5838 if (!by_ref)
5839 suppress_warning (var, OPT_Wuninitialized);
5840 break;
5842 case OMP_CLAUSE__CONDTEMP_:
5843 if (is_parallel_ctx (ctx))
5845 x = build_receiver_ref (var, false, ctx);
5846 SET_DECL_VALUE_EXPR (new_var, x);
5847 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5849 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5851 x = build_zero_cst (TREE_TYPE (var));
5852 goto do_private;
5854 break;
5856 case OMP_CLAUSE_LASTPRIVATE:
5857 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5858 break;
5859 /* FALLTHRU */
5861 case OMP_CLAUSE_PRIVATE:
5862 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5863 x = build_outer_var_ref (var, ctx);
5864 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5866 if (is_task_ctx (ctx))
5867 x = build_receiver_ref (var, false, ctx);
5868 else
5869 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5871 else
5872 x = NULL;
5873 do_private:
5874 tree nx;
5875 bool copy_ctor;
5876 copy_ctor = false;
5877 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5878 ilist, ctx, false, NULL_TREE);
5879 nx = unshare_expr (new_var);
5880 if (is_simd
5881 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5882 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5883 copy_ctor = true;
5884 if (copy_ctor)
5885 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5886 else
5887 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5888 if (is_simd)
5890 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5891 if ((TREE_ADDRESSABLE (new_var) || nx || y
5892 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5893 && (gimple_omp_for_collapse (ctx->stmt) != 1
5894 || (gimple_omp_for_index (ctx->stmt, 0)
5895 != new_var)))
5896 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5897 || omp_privatize_by_reference (var))
5898 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5899 ivar, lvar))
5901 if (omp_privatize_by_reference (var))
5903 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5904 tree new_vard = TREE_OPERAND (new_var, 0);
5905 gcc_assert (DECL_P (new_vard));
5906 SET_DECL_VALUE_EXPR (new_vard,
5907 build_fold_addr_expr (lvar));
5908 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5911 if (nx)
5913 tree iv = unshare_expr (ivar);
5914 if (copy_ctor)
5915 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5917 else
5918 x = lang_hooks.decls.omp_clause_default_ctor (c,
5922 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5924 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5925 unshare_expr (ivar), x);
5926 nx = x;
5928 if (nx && x)
5929 gimplify_and_add (x, &llist[0]);
5930 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5931 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5933 tree v = new_var;
5934 if (!DECL_P (v))
5936 gcc_assert (TREE_CODE (v) == MEM_REF);
5937 v = TREE_OPERAND (v, 0);
5938 gcc_assert (DECL_P (v));
5940 v = *ctx->lastprivate_conditional_map->get (v);
5941 tree t = create_tmp_var (TREE_TYPE (v));
5942 tree z = build_zero_cst (TREE_TYPE (v));
5943 tree orig_v
5944 = build_outer_var_ref (var, ctx,
5945 OMP_CLAUSE_LASTPRIVATE);
5946 gimple_seq_add_stmt (dlist,
5947 gimple_build_assign (t, z));
5948 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5949 tree civar = DECL_VALUE_EXPR (v);
5950 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5951 civar = unshare_expr (civar);
5952 TREE_OPERAND (civar, 1) = sctx.idx;
5953 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5954 unshare_expr (civar));
5955 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5956 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5957 orig_v, unshare_expr (ivar)));
5958 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5959 civar);
5960 x = build3 (COND_EXPR, void_type_node, cond, x,
5961 void_node);
5962 gimple_seq tseq = NULL;
5963 gimplify_and_add (x, &tseq);
5964 if (ctx->outer)
5965 lower_omp (&tseq, ctx->outer);
5966 gimple_seq_add_seq (&llist[1], tseq);
5968 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5969 && ctx->for_simd_scan_phase)
5971 x = unshare_expr (ivar);
5972 tree orig_v
5973 = build_outer_var_ref (var, ctx,
5974 OMP_CLAUSE_LASTPRIVATE);
5975 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5976 orig_v);
5977 gimplify_and_add (x, &llist[0]);
5979 if (y)
5981 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5982 if (y)
5983 gimplify_and_add (y, &llist[1]);
5985 break;
5987 if (omp_privatize_by_reference (var))
5989 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5990 tree new_vard = TREE_OPERAND (new_var, 0);
5991 gcc_assert (DECL_P (new_vard));
5992 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5993 x = TYPE_SIZE_UNIT (type);
5994 if (TREE_CONSTANT (x))
5996 x = create_tmp_var_raw (type, get_name (var));
5997 gimple_add_tmp_var (x);
5998 TREE_ADDRESSABLE (x) = 1;
5999 x = build_fold_addr_expr_loc (clause_loc, x);
6000 x = fold_convert_loc (clause_loc,
6001 TREE_TYPE (new_vard), x);
6002 gimplify_assign (new_vard, x, ilist);
6006 if (nx)
6007 gimplify_and_add (nx, ilist);
6008 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6009 && is_simd
6010 && ctx->for_simd_scan_phase)
6012 tree orig_v = build_outer_var_ref (var, ctx,
6013 OMP_CLAUSE_LASTPRIVATE);
6014 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
6015 orig_v);
6016 gimplify_and_add (x, ilist);
6018 /* FALLTHRU */
6020 do_dtor:
6021 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
6022 if (x)
6023 gimplify_and_add (x, dlist);
6024 if (allocator)
6026 if (!is_gimple_val (allocator))
6028 tree avar = create_tmp_var (TREE_TYPE (allocator));
6029 gimplify_assign (avar, allocator, dlist);
6030 allocator = avar;
6032 if (!is_gimple_val (allocate_ptr))
6034 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
6035 gimplify_assign (apvar, allocate_ptr, dlist);
6036 allocate_ptr = apvar;
6038 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
6039 gimple *g
6040 = gimple_build_call (f, 2, allocate_ptr, allocator);
6041 gimple_seq_add_stmt (dlist, g);
6043 break;
6045 case OMP_CLAUSE_LINEAR:
6046 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6047 goto do_firstprivate;
6048 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6049 x = NULL;
6050 else
6051 x = build_outer_var_ref (var, ctx);
6052 goto do_private;
6054 case OMP_CLAUSE_FIRSTPRIVATE:
6055 if (is_task_ctx (ctx))
6057 if ((omp_privatize_by_reference (var)
6058 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
6059 || is_variable_sized (var))
6060 goto do_dtor;
6061 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
6062 ctx))
6063 || use_pointer_for_field (var, NULL))
6065 x = build_receiver_ref (var, false, ctx);
6066 if (ctx->allocate_map)
6067 if (tree *allocatep = ctx->allocate_map->get (var))
6069 allocator = *allocatep;
6070 if (TREE_CODE (allocator) == TREE_LIST)
6071 allocator = TREE_PURPOSE (allocator);
6072 if (TREE_CODE (allocator) != INTEGER_CST)
6073 allocator = build_outer_var_ref (allocator, ctx);
6074 allocator = fold_convert (pointer_sized_int_node,
6075 allocator);
6076 allocate_ptr = unshare_expr (x);
6077 x = build_simple_mem_ref (x);
6078 TREE_THIS_NOTRAP (x) = 1;
6080 SET_DECL_VALUE_EXPR (new_var, x);
6081 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6082 goto do_dtor;
6085 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6086 && omp_privatize_by_reference (var))
6088 x = build_outer_var_ref (var, ctx);
6089 gcc_assert (TREE_CODE (x) == MEM_REF
6090 && integer_zerop (TREE_OPERAND (x, 1)));
6091 x = TREE_OPERAND (x, 0);
6092 x = lang_hooks.decls.omp_clause_copy_ctor
6093 (c, unshare_expr (new_var), x);
6094 gimplify_and_add (x, ilist);
6095 goto do_dtor;
6097 do_firstprivate:
6098 lower_private_allocate (var, new_var, allocator, allocate_ptr,
6099 ilist, ctx, false, NULL_TREE);
6100 x = build_outer_var_ref (var, ctx);
6101 if (is_simd)
6103 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6104 && gimple_omp_for_combined_into_p (ctx->stmt))
6106 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6107 if (DECL_P (t))
6108 t = build_outer_var_ref (t, ctx);
6109 tree stept = TREE_TYPE (t);
6110 tree ct = omp_find_clause (clauses,
6111 OMP_CLAUSE__LOOPTEMP_);
6112 gcc_assert (ct);
6113 tree l = OMP_CLAUSE_DECL (ct);
6114 tree n1 = fd->loop.n1;
6115 tree step = fd->loop.step;
6116 tree itype = TREE_TYPE (l);
6117 if (POINTER_TYPE_P (itype))
6118 itype = signed_type_for (itype);
6119 l = fold_build2 (MINUS_EXPR, itype, l, n1);
6120 if (TYPE_UNSIGNED (itype)
6121 && fd->loop.cond_code == GT_EXPR)
6122 l = fold_build2 (TRUNC_DIV_EXPR, itype,
6123 fold_build1 (NEGATE_EXPR, itype, l),
6124 fold_build1 (NEGATE_EXPR,
6125 itype, step));
6126 else
6127 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6128 t = fold_build2 (MULT_EXPR, stept,
6129 fold_convert (stept, l), t);
6131 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6133 if (omp_privatize_by_reference (var))
6135 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6136 tree new_vard = TREE_OPERAND (new_var, 0);
6137 gcc_assert (DECL_P (new_vard));
6138 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6139 nx = TYPE_SIZE_UNIT (type);
6140 if (TREE_CONSTANT (nx))
6142 nx = create_tmp_var_raw (type,
6143 get_name (var));
6144 gimple_add_tmp_var (nx);
6145 TREE_ADDRESSABLE (nx) = 1;
6146 nx = build_fold_addr_expr_loc (clause_loc,
6147 nx);
6148 nx = fold_convert_loc (clause_loc,
6149 TREE_TYPE (new_vard),
6150 nx);
6151 gimplify_assign (new_vard, nx, ilist);
6155 x = lang_hooks.decls.omp_clause_linear_ctor
6156 (c, new_var, x, t);
6157 gimplify_and_add (x, ilist);
6158 goto do_dtor;
6161 if (POINTER_TYPE_P (TREE_TYPE (x)))
6162 x = fold_build_pointer_plus (x, t);
6163 else
6164 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x,
6165 fold_convert (TREE_TYPE (x), t));
6168 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6169 || TREE_ADDRESSABLE (new_var)
6170 || omp_privatize_by_reference (var))
6171 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6172 ivar, lvar))
6174 if (omp_privatize_by_reference (var))
6176 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6177 tree new_vard = TREE_OPERAND (new_var, 0);
6178 gcc_assert (DECL_P (new_vard));
6179 SET_DECL_VALUE_EXPR (new_vard,
6180 build_fold_addr_expr (lvar));
6181 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6183 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6185 tree iv = create_tmp_var (TREE_TYPE (new_var));
6186 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6187 gimplify_and_add (x, ilist);
6188 gimple_stmt_iterator gsi
6189 = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6190 gassign *g
6191 = gimple_build_assign (unshare_expr (lvar), iv);
6192 gsi_insert_before_without_update (&gsi, g,
6193 GSI_SAME_STMT);
6194 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6195 enum tree_code code = PLUS_EXPR;
6196 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6197 code = POINTER_PLUS_EXPR;
6198 g = gimple_build_assign (iv, code, iv, t);
6199 gsi_insert_before_without_update (&gsi, g,
6200 GSI_SAME_STMT);
6201 break;
6203 x = lang_hooks.decls.omp_clause_copy_ctor
6204 (c, unshare_expr (ivar), x);
6205 gimplify_and_add (x, &llist[0]);
6206 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6207 if (x)
6208 gimplify_and_add (x, &llist[1]);
6209 break;
6211 if (omp_privatize_by_reference (var))
6213 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6214 tree new_vard = TREE_OPERAND (new_var, 0);
6215 gcc_assert (DECL_P (new_vard));
6216 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6217 nx = TYPE_SIZE_UNIT (type);
6218 if (TREE_CONSTANT (nx))
6220 nx = create_tmp_var_raw (type, get_name (var));
6221 gimple_add_tmp_var (nx);
6222 TREE_ADDRESSABLE (nx) = 1;
6223 nx = build_fold_addr_expr_loc (clause_loc, nx);
6224 nx = fold_convert_loc (clause_loc,
6225 TREE_TYPE (new_vard), nx);
6226 gimplify_assign (new_vard, nx, ilist);
6230 x = lang_hooks.decls.omp_clause_copy_ctor
6231 (c, unshare_expr (new_var), x);
6232 gimplify_and_add (x, ilist);
6233 goto do_dtor;
6235 case OMP_CLAUSE__LOOPTEMP_:
6236 case OMP_CLAUSE__REDUCTEMP_:
6237 gcc_assert (is_taskreg_ctx (ctx));
6238 x = build_outer_var_ref (var, ctx);
6239 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6240 gimplify_and_add (x, ilist);
6241 break;
6243 case OMP_CLAUSE_COPYIN:
6244 by_ref = use_pointer_for_field (var, NULL);
6245 x = build_receiver_ref (var, by_ref, ctx);
6246 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6247 append_to_statement_list (x, &copyin_seq);
6248 copyin_by_ref |= by_ref;
6249 break;
6251 case OMP_CLAUSE_REDUCTION:
6252 case OMP_CLAUSE_IN_REDUCTION:
6253 /* OpenACC reductions are initialized using the
6254 GOACC_REDUCTION internal function. */
6255 if (is_gimple_omp_oacc (ctx->stmt))
6256 break;
6257 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6259 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6260 gimple *tseq;
6261 tree ptype = TREE_TYPE (placeholder);
6262 if (cond)
6264 x = error_mark_node;
6265 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6266 && !task_reduction_needs_orig_p)
6267 x = var;
6268 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6270 tree pptype = build_pointer_type (ptype);
6271 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6272 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6273 size_int (task_reduction_cnt_full
6274 + task_reduction_cntorig - 1),
6275 NULL_TREE, NULL_TREE);
6276 else
6278 unsigned int idx
6279 = *ctx->task_reduction_map->get (c);
6280 x = task_reduction_read (ilist, tskred_temp,
6281 pptype, 7 + 3 * idx);
6283 x = fold_convert (pptype, x);
6284 x = build_simple_mem_ref (x);
6287 else
6289 lower_private_allocate (var, new_var, allocator,
6290 allocate_ptr, ilist, ctx, false,
6291 NULL_TREE);
6292 x = build_outer_var_ref (var, ctx);
6294 if (omp_privatize_by_reference (var)
6295 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6296 x = build_fold_addr_expr_loc (clause_loc, x);
6298 SET_DECL_VALUE_EXPR (placeholder, x);
6299 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6300 tree new_vard = new_var;
6301 if (omp_privatize_by_reference (var))
6303 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6304 new_vard = TREE_OPERAND (new_var, 0);
6305 gcc_assert (DECL_P (new_vard));
6307 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6308 if (is_simd
6309 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6310 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6311 rvarp = &rvar;
6312 if (is_simd
6313 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6314 ivar, lvar, rvarp,
6315 &rvar2))
6317 if (new_vard == new_var)
6319 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6320 SET_DECL_VALUE_EXPR (new_var, ivar);
6322 else
6324 SET_DECL_VALUE_EXPR (new_vard,
6325 build_fold_addr_expr (ivar));
6326 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6328 x = lang_hooks.decls.omp_clause_default_ctor
6329 (c, unshare_expr (ivar),
6330 build_outer_var_ref (var, ctx));
6331 if (rvarp && ctx->for_simd_scan_phase)
6333 if (x)
6334 gimplify_and_add (x, &llist[0]);
6335 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6336 if (x)
6337 gimplify_and_add (x, &llist[1]);
6338 break;
6340 else if (rvarp)
6342 if (x)
6344 gimplify_and_add (x, &llist[0]);
6346 tree ivar2 = unshare_expr (lvar);
6347 TREE_OPERAND (ivar2, 1) = sctx.idx;
6348 x = lang_hooks.decls.omp_clause_default_ctor
6349 (c, ivar2, build_outer_var_ref (var, ctx));
6350 gimplify_and_add (x, &llist[0]);
6352 if (rvar2)
6354 x = lang_hooks.decls.omp_clause_default_ctor
6355 (c, unshare_expr (rvar2),
6356 build_outer_var_ref (var, ctx));
6357 gimplify_and_add (x, &llist[0]);
6360 /* For types that need construction, add another
6361 private var which will be default constructed
6362 and optionally initialized with
6363 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6364 loop we want to assign this value instead of
6365 constructing and destructing it in each
6366 iteration. */
6367 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6368 gimple_add_tmp_var (nv);
6369 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6370 ? rvar2
6371 : ivar, 0),
6372 nv);
6373 x = lang_hooks.decls.omp_clause_default_ctor
6374 (c, nv, build_outer_var_ref (var, ctx));
6375 gimplify_and_add (x, ilist);
6377 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6379 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6380 x = DECL_VALUE_EXPR (new_vard);
6381 tree vexpr = nv;
6382 if (new_vard != new_var)
6383 vexpr = build_fold_addr_expr (nv);
6384 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6385 lower_omp (&tseq, ctx);
6386 SET_DECL_VALUE_EXPR (new_vard, x);
6387 gimple_seq_add_seq (ilist, tseq);
6388 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6391 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6392 if (x)
6393 gimplify_and_add (x, dlist);
6396 tree ref = build_outer_var_ref (var, ctx);
6397 x = unshare_expr (ivar);
6398 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6399 ref);
6400 gimplify_and_add (x, &llist[0]);
6402 ref = build_outer_var_ref (var, ctx);
6403 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6404 rvar);
6405 gimplify_and_add (x, &llist[3]);
6407 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6408 if (new_vard == new_var)
6409 SET_DECL_VALUE_EXPR (new_var, lvar);
6410 else
6411 SET_DECL_VALUE_EXPR (new_vard,
6412 build_fold_addr_expr (lvar));
6414 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6415 if (x)
6416 gimplify_and_add (x, &llist[1]);
6418 tree ivar2 = unshare_expr (lvar);
6419 TREE_OPERAND (ivar2, 1) = sctx.idx;
6420 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6421 if (x)
6422 gimplify_and_add (x, &llist[1]);
6424 if (rvar2)
6426 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6427 if (x)
6428 gimplify_and_add (x, &llist[1]);
6430 break;
6432 if (x)
6433 gimplify_and_add (x, &llist[0]);
6434 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6436 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6437 lower_omp (&tseq, ctx);
6438 gimple_seq_add_seq (&llist[0], tseq);
6440 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6441 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6442 lower_omp (&tseq, ctx);
6443 gimple_seq_add_seq (&llist[1], tseq);
6444 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6445 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6446 if (new_vard == new_var)
6447 SET_DECL_VALUE_EXPR (new_var, lvar);
6448 else
6449 SET_DECL_VALUE_EXPR (new_vard,
6450 build_fold_addr_expr (lvar));
6451 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6452 if (x)
6453 gimplify_and_add (x, &llist[1]);
6454 break;
6456 /* If this is a reference to constant size reduction var
6457 with placeholder, we haven't emitted the initializer
6458 for it because it is undesirable if SIMD arrays are used.
6459 But if they aren't used, we need to emit the deferred
6460 initialization now. */
6461 else if (omp_privatize_by_reference (var) && is_simd)
6462 handle_simd_reference (clause_loc, new_vard, ilist);
6464 tree lab2 = NULL_TREE;
6465 if (cond)
6467 gimple *g;
6468 if (!is_parallel_ctx (ctx))
6470 tree condv = create_tmp_var (boolean_type_node);
6471 tree m = build_simple_mem_ref (cond);
6472 g = gimple_build_assign (condv, m);
6473 gimple_seq_add_stmt (ilist, g);
6474 tree lab1
6475 = create_artificial_label (UNKNOWN_LOCATION);
6476 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6477 g = gimple_build_cond (NE_EXPR, condv,
6478 boolean_false_node,
6479 lab2, lab1);
6480 gimple_seq_add_stmt (ilist, g);
6481 gimple_seq_add_stmt (ilist,
6482 gimple_build_label (lab1));
6484 g = gimple_build_assign (build_simple_mem_ref (cond),
6485 boolean_true_node);
6486 gimple_seq_add_stmt (ilist, g);
6488 x = lang_hooks.decls.omp_clause_default_ctor
6489 (c, unshare_expr (new_var),
6490 cond ? NULL_TREE
6491 : build_outer_var_ref (var, ctx));
6492 if (x)
6493 gimplify_and_add (x, ilist);
6495 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6496 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6498 if (ctx->for_simd_scan_phase)
6499 goto do_dtor;
6500 if (x || (!is_simd
6501 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6503 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6504 gimple_add_tmp_var (nv);
6505 ctx->cb.decl_map->put (new_vard, nv);
6506 x = lang_hooks.decls.omp_clause_default_ctor
6507 (c, nv, build_outer_var_ref (var, ctx));
6508 if (x)
6509 gimplify_and_add (x, ilist);
6510 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6512 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6513 tree vexpr = nv;
6514 if (new_vard != new_var)
6515 vexpr = build_fold_addr_expr (nv);
6516 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6517 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6518 lower_omp (&tseq, ctx);
6519 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6520 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6521 gimple_seq_add_seq (ilist, tseq);
6523 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6524 if (is_simd && ctx->scan_exclusive)
6526 tree nv2
6527 = create_tmp_var_raw (TREE_TYPE (new_var));
6528 gimple_add_tmp_var (nv2);
6529 ctx->cb.decl_map->put (nv, nv2);
6530 x = lang_hooks.decls.omp_clause_default_ctor
6531 (c, nv2, build_outer_var_ref (var, ctx));
6532 gimplify_and_add (x, ilist);
6533 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6534 if (x)
6535 gimplify_and_add (x, dlist);
6537 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6538 if (x)
6539 gimplify_and_add (x, dlist);
6541 else if (is_simd
6542 && ctx->scan_exclusive
6543 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6545 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6546 gimple_add_tmp_var (nv2);
6547 ctx->cb.decl_map->put (new_vard, nv2);
6548 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6549 if (x)
6550 gimplify_and_add (x, dlist);
6552 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6553 goto do_dtor;
6556 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6558 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6559 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6560 && is_omp_target (ctx->stmt))
6562 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6563 tree oldv = NULL_TREE;
6564 gcc_assert (d);
6565 if (DECL_HAS_VALUE_EXPR_P (d))
6566 oldv = DECL_VALUE_EXPR (d);
6567 SET_DECL_VALUE_EXPR (d, new_vard);
6568 DECL_HAS_VALUE_EXPR_P (d) = 1;
6569 lower_omp (&tseq, ctx);
6570 if (oldv)
6571 SET_DECL_VALUE_EXPR (d, oldv);
6572 else
6574 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6575 DECL_HAS_VALUE_EXPR_P (d) = 0;
6578 else
6579 lower_omp (&tseq, ctx);
6580 gimple_seq_add_seq (ilist, tseq);
6582 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6583 if (is_simd)
6585 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6586 lower_omp (&tseq, ctx);
6587 gimple_seq_add_seq (dlist, tseq);
6588 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6590 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6591 if (cond)
6593 if (lab2)
6594 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6595 break;
6597 goto do_dtor;
6599 else
6601 x = omp_reduction_init (c, TREE_TYPE (new_var));
6602 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6603 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6605 if (cond)
6607 gimple *g;
6608 tree lab2 = NULL_TREE;
6609 /* GOMP_taskgroup_reduction_register memsets the whole
6610 array to zero. If the initializer is zero, we don't
6611 need to initialize it again, just mark it as ever
6612 used unconditionally, i.e. cond = true. */
6613 if (initializer_zerop (x))
6615 g = gimple_build_assign (build_simple_mem_ref (cond),
6616 boolean_true_node);
6617 gimple_seq_add_stmt (ilist, g);
6618 break;
6621 /* Otherwise, emit
6622 if (!cond) { cond = true; new_var = x; } */
6623 if (!is_parallel_ctx (ctx))
6625 tree condv = create_tmp_var (boolean_type_node);
6626 tree m = build_simple_mem_ref (cond);
6627 g = gimple_build_assign (condv, m);
6628 gimple_seq_add_stmt (ilist, g);
6629 tree lab1
6630 = create_artificial_label (UNKNOWN_LOCATION);
6631 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6632 g = gimple_build_cond (NE_EXPR, condv,
6633 boolean_false_node,
6634 lab2, lab1);
6635 gimple_seq_add_stmt (ilist, g);
6636 gimple_seq_add_stmt (ilist,
6637 gimple_build_label (lab1));
6639 g = gimple_build_assign (build_simple_mem_ref (cond),
6640 boolean_true_node);
6641 gimple_seq_add_stmt (ilist, g);
6642 gimplify_assign (new_var, x, ilist);
6643 if (lab2)
6644 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6645 break;
6648 /* reduction(-:var) sums up the partial results, so it
6649 acts identically to reduction(+:var). */
6650 if (code == MINUS_EXPR)
6651 code = PLUS_EXPR;
6653 bool is_truth_op
6654 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6655 tree new_vard = new_var;
6656 if (is_simd && omp_privatize_by_reference (var))
6658 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6659 new_vard = TREE_OPERAND (new_var, 0);
6660 gcc_assert (DECL_P (new_vard));
6662 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6663 if (is_simd
6664 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6665 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6666 rvarp = &rvar;
6667 if (is_simd
6668 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6669 ivar, lvar, rvarp,
6670 &rvar2))
6672 if (new_vard != new_var)
6674 SET_DECL_VALUE_EXPR (new_vard,
6675 build_fold_addr_expr (lvar));
6676 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6679 tree ref = build_outer_var_ref (var, ctx);
6681 if (rvarp)
6683 if (ctx->for_simd_scan_phase)
6684 break;
6685 gimplify_assign (ivar, ref, &llist[0]);
6686 ref = build_outer_var_ref (var, ctx);
6687 gimplify_assign (ref, rvar, &llist[3]);
6688 break;
6691 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6693 if (sctx.is_simt)
6695 if (!simt_lane)
6696 simt_lane = create_tmp_var (unsigned_type_node);
6697 x = build_call_expr_internal_loc
6698 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6699 TREE_TYPE (ivar), 2, ivar, simt_lane);
6700 /* Make sure x is evaluated unconditionally. */
6701 tree bfly_var = create_tmp_var (TREE_TYPE (ivar));
6702 gimplify_assign (bfly_var, x, &llist[2]);
6703 x = build2 (code, TREE_TYPE (ivar), ivar, bfly_var);
6704 gimplify_assign (ivar, x, &llist[2]);
6706 tree ivar2 = ivar;
6707 tree ref2 = ref;
6708 if (is_truth_op)
6710 tree zero = build_zero_cst (TREE_TYPE (ivar));
6711 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6712 boolean_type_node, ivar,
6713 zero);
6714 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6715 boolean_type_node, ref,
6716 zero);
6718 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6719 if (is_truth_op)
6720 x = fold_convert (TREE_TYPE (ref), x);
6721 ref = build_outer_var_ref (var, ctx);
6722 gimplify_assign (ref, x, &llist[1]);
6725 else
6727 lower_private_allocate (var, new_var, allocator,
6728 allocate_ptr, ilist, ctx,
6729 false, NULL_TREE);
6730 if (omp_privatize_by_reference (var) && is_simd)
6731 handle_simd_reference (clause_loc, new_vard, ilist);
6732 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6733 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6734 break;
6735 gimplify_assign (new_var, x, ilist);
6736 if (is_simd)
6738 tree ref = build_outer_var_ref (var, ctx);
6739 tree new_var2 = new_var;
6740 tree ref2 = ref;
6741 if (is_truth_op)
6743 tree zero = build_zero_cst (TREE_TYPE (new_var));
6744 new_var2
6745 = fold_build2_loc (clause_loc, NE_EXPR,
6746 boolean_type_node, new_var,
6747 zero);
6748 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6749 boolean_type_node, ref,
6750 zero);
6752 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6753 if (is_truth_op)
6754 x = fold_convert (TREE_TYPE (new_var), x);
6755 ref = build_outer_var_ref (var, ctx);
6756 gimplify_assign (ref, x, dlist);
6758 if (allocator)
6759 goto do_dtor;
6762 break;
6764 default:
6765 gcc_unreachable ();
6769 if (tskred_avar)
6771 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6772 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6775 if (known_eq (sctx.max_vf, 1U))
6777 sctx.is_simt = false;
6778 if (ctx->lastprivate_conditional_map)
6780 if (gimple_omp_for_combined_into_p (ctx->stmt))
6782 /* Signal to lower_omp_1 that it should use parent context. */
6783 ctx->combined_into_simd_safelen1 = true;
6784 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6785 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6786 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6788 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6789 omp_context *outer = ctx->outer;
6790 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6791 outer = outer->outer;
6792 tree *v = ctx->lastprivate_conditional_map->get (o);
6793 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6794 tree *pv = outer->lastprivate_conditional_map->get (po);
6795 *v = *pv;
6798 else
6800 /* When not vectorized, treat lastprivate(conditional:) like
6801 normal lastprivate, as there will be just one simd lane
6802 writing the privatized variable. */
6803 delete ctx->lastprivate_conditional_map;
6804 ctx->lastprivate_conditional_map = NULL;
6809 if (nonconst_simd_if)
6811 if (sctx.lane == NULL_TREE)
6813 sctx.idx = create_tmp_var (unsigned_type_node);
6814 sctx.lane = create_tmp_var (unsigned_type_node);
6816 /* FIXME: For now. */
6817 sctx.is_simt = false;
6820 if (sctx.lane || sctx.is_simt)
6822 uid = create_tmp_var (ptr_type_node, "simduid");
6823 /* Don't want uninit warnings on simduid, it is always uninitialized,
6824 but we use it not for the value, but for the DECL_UID only. */
6825 suppress_warning (uid, OPT_Wuninitialized);
6826 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6827 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6828 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6829 gimple_omp_for_set_clauses (ctx->stmt, c);
6831 /* Emit calls denoting privatized variables and initializing a pointer to
6832 structure that holds private variables as fields after ompdevlow pass. */
6833 if (sctx.is_simt)
6835 sctx.simt_eargs[0] = uid;
6836 gimple *g
6837 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6838 gimple_call_set_lhs (g, uid);
6839 gimple_seq_add_stmt (ilist, g);
6840 sctx.simt_eargs.release ();
6842 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6843 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6844 gimple_call_set_lhs (g, simtrec);
6845 gimple_seq_add_stmt (ilist, g);
6847 if (sctx.lane)
6849 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6850 2 + (nonconst_simd_if != NULL),
6851 uid, integer_zero_node,
6852 nonconst_simd_if);
6853 gimple_call_set_lhs (g, sctx.lane);
6854 gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6855 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6856 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6857 build_int_cst (unsigned_type_node, 0));
6858 gimple_seq_add_stmt (ilist, g);
6859 if (sctx.lastlane)
6861 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6862 2, uid, sctx.lane);
6863 gimple_call_set_lhs (g, sctx.lastlane);
6864 gimple_seq_add_stmt (dlist, g);
6865 gimple_seq_add_seq (dlist, llist[3]);
6867 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6868 if (llist[2])
6870 tree simt_vf = create_tmp_var (unsigned_type_node);
6871 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6872 gimple_call_set_lhs (g, simt_vf);
6873 gimple_seq_add_stmt (dlist, g);
6875 tree t = build_int_cst (unsigned_type_node, 1);
6876 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6877 gimple_seq_add_stmt (dlist, g);
6879 t = build_int_cst (unsigned_type_node, 0);
6880 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6881 gimple_seq_add_stmt (dlist, g);
6883 tree body = create_artificial_label (UNKNOWN_LOCATION);
6884 tree header = create_artificial_label (UNKNOWN_LOCATION);
6885 tree end = create_artificial_label (UNKNOWN_LOCATION);
6886 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6887 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6889 gimple_seq_add_seq (dlist, llist[2]);
6891 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6892 gimple_seq_add_stmt (dlist, g);
6894 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6895 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6896 gimple_seq_add_stmt (dlist, g);
6898 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6900 for (int i = 0; i < 2; i++)
6901 if (llist[i])
6903 tree vf = create_tmp_var (unsigned_type_node);
6904 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6905 gimple_call_set_lhs (g, vf);
6906 gimple_seq *seq = i == 0 ? ilist : dlist;
6907 gimple_seq_add_stmt (seq, g);
6908 tree t = build_int_cst (unsigned_type_node, 0);
6909 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6910 gimple_seq_add_stmt (seq, g);
6911 tree body = create_artificial_label (UNKNOWN_LOCATION);
6912 tree header = create_artificial_label (UNKNOWN_LOCATION);
6913 tree end = create_artificial_label (UNKNOWN_LOCATION);
6914 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6915 gimple_seq_add_stmt (seq, gimple_build_label (body));
6916 gimple_seq_add_seq (seq, llist[i]);
6917 t = build_int_cst (unsigned_type_node, 1);
6918 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6919 gimple_seq_add_stmt (seq, g);
6920 gimple_seq_add_stmt (seq, gimple_build_label (header));
6921 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6922 gimple_seq_add_stmt (seq, g);
6923 gimple_seq_add_stmt (seq, gimple_build_label (end));
6926 if (sctx.is_simt)
6928 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6929 gimple *g
6930 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6931 gimple_seq_add_stmt (dlist, g);
6934 /* The copyin sequence is not to be executed by the main thread, since
6935 that would result in self-copies. Perhaps not visible to scalars,
6936 but it certainly is to C++ operator=. */
6937 if (copyin_seq)
6939 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6941 x = build2 (NE_EXPR, boolean_type_node, x,
6942 build_int_cst (TREE_TYPE (x), 0));
6943 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6944 gimplify_and_add (x, ilist);
6947 /* If any copyin variable is passed by reference, we must ensure the
6948 master thread doesn't modify it before it is copied over in all
6949 threads. Similarly for variables in both firstprivate and
6950 lastprivate clauses we need to ensure the lastprivate copying
6951 happens after firstprivate copying in all threads. And similarly
6952 for UDRs if initializer expression refers to omp_orig. */
6953 if (copyin_by_ref || lastprivate_firstprivate
6954 || (reduction_omp_orig_ref
6955 && !ctx->scan_inclusive
6956 && !ctx->scan_exclusive))
6958 /* Don't add any barrier for #pragma omp simd or
6959 #pragma omp distribute. */
6960 if (!is_task_ctx (ctx)
6961 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6962 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6963 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6966 /* If max_vf is non-zero, then we can use only a vectorization factor
6967 up to the max_vf we chose. So stick it into the safelen clause. */
6968 if (maybe_ne (sctx.max_vf, 0U))
6970 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6971 OMP_CLAUSE_SAFELEN);
6972 poly_uint64 safe_len;
6973 if (c == NULL_TREE
6974 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6975 && maybe_gt (safe_len, sctx.max_vf)))
6977 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6978 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6979 sctx.max_vf);
6980 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6981 gimple_omp_for_set_clauses (ctx->stmt, c);
6986 /* Create temporary variables for lastprivate(conditional:) implementation
6987 in context CTX with CLAUSES. */
6989 static void
6990 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6992 tree iter_type = NULL_TREE;
6993 tree cond_ptr = NULL_TREE;
6994 tree iter_var = NULL_TREE;
6995 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6996 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6997 tree next = *clauses;
6998 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6999 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7000 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
7002 if (is_simd)
7004 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
7005 gcc_assert (cc);
7006 if (iter_type == NULL_TREE)
7008 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
7009 iter_var = create_tmp_var_raw (iter_type);
7010 DECL_CONTEXT (iter_var) = current_function_decl;
7011 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7012 DECL_CHAIN (iter_var) = ctx->block_vars;
7013 ctx->block_vars = iter_var;
7014 tree c3
7015 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7016 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7017 OMP_CLAUSE_DECL (c3) = iter_var;
7018 OMP_CLAUSE_CHAIN (c3) = *clauses;
7019 *clauses = c3;
7020 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7022 next = OMP_CLAUSE_CHAIN (cc);
7023 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7024 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
7025 ctx->lastprivate_conditional_map->put (o, v);
7026 continue;
7028 if (iter_type == NULL)
7030 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
7032 struct omp_for_data fd;
7033 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
7034 NULL);
7035 iter_type = unsigned_type_for (fd.iter_type);
7037 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
7038 iter_type = unsigned_type_node;
7039 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
7040 if (c2)
7042 cond_ptr
7043 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
7044 OMP_CLAUSE_DECL (c2) = cond_ptr;
7046 else
7048 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
7049 DECL_CONTEXT (cond_ptr) = current_function_decl;
7050 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
7051 DECL_CHAIN (cond_ptr) = ctx->block_vars;
7052 ctx->block_vars = cond_ptr;
7053 c2 = build_omp_clause (UNKNOWN_LOCATION,
7054 OMP_CLAUSE__CONDTEMP_);
7055 OMP_CLAUSE_DECL (c2) = cond_ptr;
7056 OMP_CLAUSE_CHAIN (c2) = *clauses;
7057 *clauses = c2;
7059 iter_var = create_tmp_var_raw (iter_type);
7060 DECL_CONTEXT (iter_var) = current_function_decl;
7061 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7062 DECL_CHAIN (iter_var) = ctx->block_vars;
7063 ctx->block_vars = iter_var;
7064 tree c3
7065 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7066 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7067 OMP_CLAUSE_DECL (c3) = iter_var;
7068 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
7069 OMP_CLAUSE_CHAIN (c2) = c3;
7070 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7072 tree v = create_tmp_var_raw (iter_type);
7073 DECL_CONTEXT (v) = current_function_decl;
7074 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
7075 DECL_CHAIN (v) = ctx->block_vars;
7076 ctx->block_vars = v;
7077 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7078 ctx->lastprivate_conditional_map->put (o, v);
7083 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7084 both parallel and workshare constructs. PREDICATE may be NULL if it's
7085 always true. BODY_P is the sequence to insert early initialization
7086 if needed, STMT_LIST is where the non-conditional lastprivate handling
7087 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
7088 section. */
7090 static void
7091 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
7092 gimple_seq *stmt_list, gimple_seq *cstmt_list,
7093 omp_context *ctx)
7095 tree x, c, label = NULL, orig_clauses = clauses;
7096 bool par_clauses = false;
7097 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
7098 unsigned HOST_WIDE_INT conditional_off = 0;
7099 gimple_seq post_stmt_list = NULL;
7101 /* Early exit if there are no lastprivate or linear clauses. */
7102 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
7103 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
7104 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
7105 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
7106 break;
7107 if (clauses == NULL)
7109 /* If this was a workshare clause, see if it had been combined
7110 with its parallel. In that case, look for the clauses on the
7111 parallel statement itself. */
7112 if (is_parallel_ctx (ctx))
7113 return;
7115 ctx = ctx->outer;
7116 if (ctx == NULL || !is_parallel_ctx (ctx))
7117 return;
7119 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7120 OMP_CLAUSE_LASTPRIVATE);
7121 if (clauses == NULL)
7122 return;
7123 par_clauses = true;
7126 bool maybe_simt = false;
7127 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7128 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7130 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
7131 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
7132 if (simduid)
7133 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
7136 if (predicate)
7138 gcond *stmt;
7139 tree label_true, arm1, arm2;
7140 enum tree_code pred_code = TREE_CODE (predicate);
7142 label = create_artificial_label (UNKNOWN_LOCATION);
7143 label_true = create_artificial_label (UNKNOWN_LOCATION);
7144 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
7146 arm1 = TREE_OPERAND (predicate, 0);
7147 arm2 = TREE_OPERAND (predicate, 1);
7148 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7149 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
7151 else
7153 arm1 = predicate;
7154 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7155 arm2 = boolean_false_node;
7156 pred_code = NE_EXPR;
7158 if (maybe_simt)
7160 c = build2 (pred_code, boolean_type_node, arm1, arm2);
7161 c = fold_convert (integer_type_node, c);
7162 simtcond = create_tmp_var (integer_type_node);
7163 gimplify_assign (simtcond, c, stmt_list);
7164 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
7165 1, simtcond);
7166 c = create_tmp_var (integer_type_node);
7167 gimple_call_set_lhs (g, c);
7168 gimple_seq_add_stmt (stmt_list, g);
7169 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
7170 label_true, label);
7172 else
7173 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
7174 gimple_seq_add_stmt (stmt_list, stmt);
7175 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
7178 tree cond_ptr = NULL_TREE;
7179 for (c = clauses; c ;)
7181 tree var, new_var;
7182 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7183 gimple_seq *this_stmt_list = stmt_list;
7184 tree lab2 = NULL_TREE;
7186 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7187 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7188 && ctx->lastprivate_conditional_map
7189 && !ctx->combined_into_simd_safelen1)
7191 gcc_assert (body_p);
7192 if (simduid)
7193 goto next;
7194 if (cond_ptr == NULL_TREE)
7196 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
7197 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
7199 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
7200 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7201 tree v = *ctx->lastprivate_conditional_map->get (o);
7202 gimplify_assign (v, build_zero_cst (type), body_p);
7203 this_stmt_list = cstmt_list;
7204 tree mem;
7205 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
7207 mem = build2 (MEM_REF, type, cond_ptr,
7208 build_int_cst (TREE_TYPE (cond_ptr),
7209 conditional_off));
7210 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
7212 else
7213 mem = build4 (ARRAY_REF, type, cond_ptr,
7214 size_int (conditional_off++), NULL_TREE, NULL_TREE);
7215 tree mem2 = copy_node (mem);
7216 gimple_seq seq = NULL;
7217 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
7218 gimple_seq_add_seq (this_stmt_list, seq);
7219 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7220 lab2 = create_artificial_label (UNKNOWN_LOCATION);
7221 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
7222 gimple_seq_add_stmt (this_stmt_list, g);
7223 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
7224 gimplify_assign (mem2, v, this_stmt_list);
7226 else if (predicate
7227 && ctx->combined_into_simd_safelen1
7228 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7229 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7230 && ctx->lastprivate_conditional_map)
7231 this_stmt_list = &post_stmt_list;
7233 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7234 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7235 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7237 var = OMP_CLAUSE_DECL (c);
7238 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7239 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7240 && is_taskloop_ctx (ctx))
7242 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
7243 new_var = lookup_decl (var, ctx->outer);
7245 else
7247 new_var = lookup_decl (var, ctx);
7248 /* Avoid uninitialized warnings for lastprivate and
7249 for linear iterators. */
7250 if (predicate
7251 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7252 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
7253 suppress_warning (new_var, OPT_Wuninitialized);
7256 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
7258 tree val = DECL_VALUE_EXPR (new_var);
7259 if (TREE_CODE (val) == ARRAY_REF
7260 && VAR_P (TREE_OPERAND (val, 0))
7261 && lookup_attribute ("omp simd array",
7262 DECL_ATTRIBUTES (TREE_OPERAND (val,
7263 0))))
7265 if (lastlane == NULL)
7267 lastlane = create_tmp_var (unsigned_type_node);
7268 gcall *g
7269 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
7270 2, simduid,
7271 TREE_OPERAND (val, 1));
7272 gimple_call_set_lhs (g, lastlane);
7273 gimple_seq_add_stmt (this_stmt_list, g);
7275 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
7276 TREE_OPERAND (val, 0), lastlane,
7277 NULL_TREE, NULL_TREE);
7278 TREE_THIS_NOTRAP (new_var) = 1;
7281 else if (maybe_simt)
7283 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7284 ? DECL_VALUE_EXPR (new_var)
7285 : new_var);
7286 if (simtlast == NULL)
7288 simtlast = create_tmp_var (unsigned_type_node);
7289 gcall *g = gimple_build_call_internal
7290 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7291 gimple_call_set_lhs (g, simtlast);
7292 gimple_seq_add_stmt (this_stmt_list, g);
7294 x = build_call_expr_internal_loc
7295 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7296 TREE_TYPE (val), 2, val, simtlast);
7297 new_var = unshare_expr (new_var);
7298 gimplify_assign (new_var, x, this_stmt_list);
7299 new_var = unshare_expr (new_var);
7302 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7303 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7305 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7306 gimple_seq_add_seq (this_stmt_list,
7307 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7308 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7310 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7311 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7313 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7314 gimple_seq_add_seq (this_stmt_list,
7315 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7316 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
7319 x = NULL_TREE;
7320 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7321 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7322 && is_taskloop_ctx (ctx))
7324 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7325 ctx->outer->outer);
7326 if (is_global_var (ovar))
7327 x = ovar;
7329 if (!x)
7330 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7331 if (omp_privatize_by_reference (var))
7332 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7333 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7334 gimplify_and_add (x, this_stmt_list);
7336 if (lab2)
7337 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7340 next:
7341 c = OMP_CLAUSE_CHAIN (c);
7342 if (c == NULL && !par_clauses)
7344 /* If this was a workshare clause, see if it had been combined
7345 with its parallel. In that case, continue looking for the
7346 clauses also on the parallel statement itself. */
7347 if (is_parallel_ctx (ctx))
7348 break;
7350 ctx = ctx->outer;
7351 if (ctx == NULL || !is_parallel_ctx (ctx))
7352 break;
7354 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7355 OMP_CLAUSE_LASTPRIVATE);
7356 par_clauses = true;
7360 if (label)
7361 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7362 gimple_seq_add_seq (stmt_list, post_stmt_list);
7365 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7366 (which might be a placeholder). INNER is true if this is an inner
7367 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7368 join markers. Generate the before-loop forking sequence in
7369 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7370 general form of these sequences is
7372 GOACC_REDUCTION_SETUP
7373 GOACC_FORK
7374 GOACC_REDUCTION_INIT
7376 GOACC_REDUCTION_FINI
7377 GOACC_JOIN
7378 GOACC_REDUCTION_TEARDOWN. */
7380 static void
7381 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7382 gcall *fork, gcall *private_marker, gcall *join,
7383 gimple_seq *fork_seq, gimple_seq *join_seq,
7384 omp_context *ctx)
7386 gimple_seq before_fork = NULL;
7387 gimple_seq after_fork = NULL;
7388 gimple_seq before_join = NULL;
7389 gimple_seq after_join = NULL;
7390 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7391 setup_code = NULL_TREE, teardown_code = NULL_TREE;
7392 unsigned offset = 0;
7394 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7395 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7397 /* No 'reduction' clauses on OpenACC 'kernels'. */
7398 gcc_checking_assert (!is_oacc_kernels (ctx));
7399 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7400 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7402 tree orig = OMP_CLAUSE_DECL (c);
7403 tree var = maybe_lookup_decl (orig, ctx);
7404 tree ref_to_res = NULL_TREE;
7405 tree incoming, outgoing, v1, v2, v3;
7406 bool is_private = false;
7408 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7409 if (rcode == MINUS_EXPR)
7410 rcode = PLUS_EXPR;
7411 else if (rcode == TRUTH_ANDIF_EXPR)
7412 rcode = BIT_AND_EXPR;
7413 else if (rcode == TRUTH_ORIF_EXPR)
7414 rcode = BIT_IOR_EXPR;
7415 tree op = build_int_cst (unsigned_type_node, rcode);
7417 if (!var)
7418 var = orig;
7420 incoming = outgoing = var;
7422 if (!inner)
7424 /* See if an outer construct also reduces this variable. */
7425 omp_context *outer = ctx;
7427 while (omp_context *probe = outer->outer)
7429 enum gimple_code type = gimple_code (probe->stmt);
7430 tree cls;
7432 switch (type)
7434 case GIMPLE_OMP_FOR:
7435 cls = gimple_omp_for_clauses (probe->stmt);
7436 break;
7438 case GIMPLE_OMP_TARGET:
7439 /* No 'reduction' clauses inside OpenACC 'kernels'
7440 regions. */
7441 gcc_checking_assert (!is_oacc_kernels (probe));
7443 if (!is_gimple_omp_offloaded (probe->stmt))
7444 goto do_lookup;
7446 cls = gimple_omp_target_clauses (probe->stmt);
7447 break;
7449 default:
7450 goto do_lookup;
7453 outer = probe;
7454 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7455 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7456 && orig == OMP_CLAUSE_DECL (cls))
7458 incoming = outgoing = lookup_decl (orig, probe);
7459 goto has_outer_reduction;
7461 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7462 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7463 && orig == OMP_CLAUSE_DECL (cls))
7465 is_private = true;
7466 goto do_lookup;
7470 do_lookup:
7471 /* This is the outermost construct with this reduction,
7472 see if there's a mapping for it. */
7473 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7474 && maybe_lookup_field (orig, outer) && !is_private)
7476 ref_to_res = build_receiver_ref (orig, false, outer);
7477 if (omp_privatize_by_reference (orig))
7478 ref_to_res = build_simple_mem_ref (ref_to_res);
7480 tree type = TREE_TYPE (var);
7481 if (POINTER_TYPE_P (type))
7482 type = TREE_TYPE (type);
7484 outgoing = var;
7485 incoming = omp_reduction_init_op (loc, rcode, type);
7487 else
7489 /* Try to look at enclosing contexts for reduction var,
7490 use original if no mapping found. */
7491 tree t = NULL_TREE;
7492 omp_context *c = ctx->outer;
7493 while (c && !t)
7495 t = maybe_lookup_decl (orig, c);
7496 c = c->outer;
7498 incoming = outgoing = (t ? t : orig);
7501 has_outer_reduction:;
7504 if (!ref_to_res)
7505 ref_to_res = integer_zero_node;
7507 if (omp_privatize_by_reference (orig))
7509 tree type = TREE_TYPE (var);
7510 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7512 if (!inner)
7514 tree x = create_tmp_var (TREE_TYPE (type), id);
7515 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7518 v1 = create_tmp_var (type, id);
7519 v2 = create_tmp_var (type, id);
7520 v3 = create_tmp_var (type, id);
7522 gimplify_assign (v1, var, fork_seq);
7523 gimplify_assign (v2, var, fork_seq);
7524 gimplify_assign (v3, var, fork_seq);
7526 var = build_simple_mem_ref (var);
7527 v1 = build_simple_mem_ref (v1);
7528 v2 = build_simple_mem_ref (v2);
7529 v3 = build_simple_mem_ref (v3);
7530 outgoing = build_simple_mem_ref (outgoing);
7532 if (!TREE_CONSTANT (incoming))
7533 incoming = build_simple_mem_ref (incoming);
7535 else
7536 /* Note that 'var' might be a mem ref. */
7537 v1 = v2 = v3 = var;
7539 /* Determine position in reduction buffer, which may be used
7540 by target. The parser has ensured that this is not a
7541 variable-sized type. */
7542 fixed_size_mode mode
7543 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7544 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7545 offset = (offset + align - 1) & ~(align - 1);
7546 tree off = build_int_cst (sizetype, offset);
7547 offset += GET_MODE_SIZE (mode);
7549 if (!init_code)
7551 init_code = build_int_cst (integer_type_node,
7552 IFN_GOACC_REDUCTION_INIT);
7553 fini_code = build_int_cst (integer_type_node,
7554 IFN_GOACC_REDUCTION_FINI);
7555 setup_code = build_int_cst (integer_type_node,
7556 IFN_GOACC_REDUCTION_SETUP);
7557 teardown_code = build_int_cst (integer_type_node,
7558 IFN_GOACC_REDUCTION_TEARDOWN);
7561 tree setup_call
7562 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7563 TREE_TYPE (var), 6, setup_code,
7564 unshare_expr (ref_to_res),
7565 unshare_expr (incoming),
7566 level, op, off);
7567 tree init_call
7568 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7569 TREE_TYPE (var), 6, init_code,
7570 unshare_expr (ref_to_res),
7571 unshare_expr (v1), level, op, off);
7572 tree fini_call
7573 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7574 TREE_TYPE (var), 6, fini_code,
7575 unshare_expr (ref_to_res),
7576 unshare_expr (v2), level, op, off);
7577 tree teardown_call
7578 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7579 TREE_TYPE (var), 6, teardown_code,
7580 ref_to_res, unshare_expr (v3),
7581 level, op, off);
7583 gimplify_assign (unshare_expr (v1), setup_call, &before_fork);
7584 gimplify_assign (unshare_expr (v2), init_call, &after_fork);
7585 gimplify_assign (unshare_expr (v3), fini_call, &before_join);
7586 gimplify_assign (unshare_expr (outgoing), teardown_call, &after_join);
7589 /* Now stitch things together. */
7590 gimple_seq_add_seq (fork_seq, before_fork);
7591 if (private_marker)
7592 gimple_seq_add_stmt (fork_seq, private_marker);
7593 if (fork)
7594 gimple_seq_add_stmt (fork_seq, fork);
7595 gimple_seq_add_seq (fork_seq, after_fork);
7597 gimple_seq_add_seq (join_seq, before_join);
7598 if (join)
7599 gimple_seq_add_stmt (join_seq, join);
7600 gimple_seq_add_seq (join_seq, after_join);
7603 /* Generate code to implement the REDUCTION clauses, append it
7604 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7605 that should be emitted also inside of the critical section,
7606 in that case clear *CLIST afterwards, otherwise leave it as is
7607 and let the caller emit it itself. */
7609 static void
7610 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7611 gimple_seq *clist, omp_context *ctx)
7613 gimple_seq sub_seq = NULL;
7614 gimple *stmt;
7615 tree x, c;
7616 int count = 0;
7618 /* OpenACC loop reductions are handled elsewhere. */
7619 if (is_gimple_omp_oacc (ctx->stmt))
7620 return;
7622 /* SIMD reductions are handled in lower_rec_input_clauses. */
7623 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7624 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7625 return;
7627 /* inscan reductions are handled elsewhere. */
7628 if (ctx->scan_inclusive || ctx->scan_exclusive)
7629 return;
7631 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7632 update in that case, otherwise use a lock. */
7633 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7634 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7635 && !OMP_CLAUSE_REDUCTION_TASK (c))
7637 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7638 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7640 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7641 count = -1;
7642 break;
7644 count++;
7647 if (count == 0)
7648 return;
7650 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7652 tree var, ref, new_var, orig_var;
7653 enum tree_code code;
7654 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7656 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7657 || OMP_CLAUSE_REDUCTION_TASK (c))
7658 continue;
7660 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7661 orig_var = var = OMP_CLAUSE_DECL (c);
7662 if (TREE_CODE (var) == MEM_REF)
7664 var = TREE_OPERAND (var, 0);
7665 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7666 var = TREE_OPERAND (var, 0);
7667 if (TREE_CODE (var) == ADDR_EXPR)
7668 var = TREE_OPERAND (var, 0);
7669 else
7671 /* If this is a pointer or referenced based array
7672 section, the var could be private in the outer
7673 context e.g. on orphaned loop construct. Pretend this
7674 is private variable's outer reference. */
7675 ccode = OMP_CLAUSE_PRIVATE;
7676 if (INDIRECT_REF_P (var))
7677 var = TREE_OPERAND (var, 0);
7679 orig_var = var;
7680 if (is_variable_sized (var))
7682 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7683 var = DECL_VALUE_EXPR (var);
7684 gcc_assert (INDIRECT_REF_P (var));
7685 var = TREE_OPERAND (var, 0);
7686 gcc_assert (DECL_P (var));
7689 new_var = lookup_decl (var, ctx);
7690 if (var == OMP_CLAUSE_DECL (c)
7691 && omp_privatize_by_reference (var))
7692 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7693 ref = build_outer_var_ref (var, ctx, ccode);
7694 code = OMP_CLAUSE_REDUCTION_CODE (c);
7696 /* reduction(-:var) sums up the partial results, so it acts
7697 identically to reduction(+:var). */
7698 if (code == MINUS_EXPR)
7699 code = PLUS_EXPR;
7701 bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
7702 if (count == 1)
7704 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7706 addr = save_expr (addr);
7707 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7708 tree new_var2 = new_var;
7709 tree ref2 = ref;
7710 if (is_truth_op)
7712 tree zero = build_zero_cst (TREE_TYPE (new_var));
7713 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7714 boolean_type_node, new_var, zero);
7715 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7716 ref, zero);
7718 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7719 new_var2);
7720 if (is_truth_op)
7721 x = fold_convert (TREE_TYPE (new_var), x);
7722 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7723 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7724 gimplify_and_add (x, stmt_seqp);
7725 return;
7727 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7729 tree d = OMP_CLAUSE_DECL (c);
7730 tree type = TREE_TYPE (d);
7731 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7732 tree i = create_tmp_var (TREE_TYPE (v));
7733 tree ptype = build_pointer_type (TREE_TYPE (type));
7734 tree bias = TREE_OPERAND (d, 1);
7735 d = TREE_OPERAND (d, 0);
7736 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7738 tree b = TREE_OPERAND (d, 1);
7739 b = maybe_lookup_decl (b, ctx);
7740 if (b == NULL)
7742 b = TREE_OPERAND (d, 1);
7743 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7745 if (integer_zerop (bias))
7746 bias = b;
7747 else
7749 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7750 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7751 TREE_TYPE (b), b, bias);
7753 d = TREE_OPERAND (d, 0);
7755 /* For ref build_outer_var_ref already performs this, so
7756 only new_var needs a dereference. */
7757 if (INDIRECT_REF_P (d))
7759 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7760 gcc_assert (omp_privatize_by_reference (var)
7761 && var == orig_var);
7763 else if (TREE_CODE (d) == ADDR_EXPR)
7765 if (orig_var == var)
7767 new_var = build_fold_addr_expr (new_var);
7768 ref = build_fold_addr_expr (ref);
7771 else
7773 gcc_assert (orig_var == var);
7774 if (omp_privatize_by_reference (var))
7775 ref = build_fold_addr_expr (ref);
7777 if (DECL_P (v))
7779 tree t = maybe_lookup_decl (v, ctx);
7780 if (t)
7781 v = t;
7782 else
7783 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7784 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7786 if (!integer_zerop (bias))
7788 bias = fold_convert_loc (clause_loc, sizetype, bias);
7789 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7790 TREE_TYPE (new_var), new_var,
7791 unshare_expr (bias));
7792 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7793 TREE_TYPE (ref), ref, bias);
7795 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7796 ref = fold_convert_loc (clause_loc, ptype, ref);
7797 tree m = create_tmp_var (ptype);
7798 gimplify_assign (m, new_var, stmt_seqp);
7799 new_var = m;
7800 m = create_tmp_var (ptype);
7801 gimplify_assign (m, ref, stmt_seqp);
7802 ref = m;
7803 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7804 tree body = create_artificial_label (UNKNOWN_LOCATION);
7805 tree end = create_artificial_label (UNKNOWN_LOCATION);
7806 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7807 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7808 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7809 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7811 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7812 tree decl_placeholder
7813 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7814 SET_DECL_VALUE_EXPR (placeholder, out);
7815 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7816 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7817 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7818 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7819 gimple_seq_add_seq (&sub_seq,
7820 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7821 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7822 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7823 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7825 else
7827 tree out2 = out;
7828 tree priv2 = priv;
7829 if (is_truth_op)
7831 tree zero = build_zero_cst (TREE_TYPE (out));
7832 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7833 boolean_type_node, out, zero);
7834 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7835 boolean_type_node, priv, zero);
7837 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7838 if (is_truth_op)
7839 x = fold_convert (TREE_TYPE (out), x);
7840 out = unshare_expr (out);
7841 gimplify_assign (out, x, &sub_seq);
7843 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7844 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7845 gimple_seq_add_stmt (&sub_seq, g);
7846 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7847 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7848 gimple_seq_add_stmt (&sub_seq, g);
7849 g = gimple_build_assign (i, PLUS_EXPR, i,
7850 build_int_cst (TREE_TYPE (i), 1));
7851 gimple_seq_add_stmt (&sub_seq, g);
7852 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7853 gimple_seq_add_stmt (&sub_seq, g);
7854 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7856 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7858 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7860 if (omp_privatize_by_reference (var)
7861 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7862 TREE_TYPE (ref)))
7863 ref = build_fold_addr_expr_loc (clause_loc, ref);
7864 SET_DECL_VALUE_EXPR (placeholder, ref);
7865 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7866 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7867 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7868 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7869 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7871 else
7873 tree new_var2 = new_var;
7874 tree ref2 = ref;
7875 if (is_truth_op)
7877 tree zero = build_zero_cst (TREE_TYPE (new_var));
7878 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7879 boolean_type_node, new_var, zero);
7880 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7881 ref, zero);
7883 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7884 if (is_truth_op)
7885 x = fold_convert (TREE_TYPE (new_var), x);
7886 ref = build_outer_var_ref (var, ctx);
7887 gimplify_assign (ref, x, &sub_seq);
7891 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7893 gimple_seq_add_stmt (stmt_seqp, stmt);
7895 gimple_seq_add_seq (stmt_seqp, sub_seq);
7897 if (clist)
7899 gimple_seq_add_seq (stmt_seqp, *clist);
7900 *clist = NULL;
7903 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7905 gimple_seq_add_stmt (stmt_seqp, stmt);
7909 /* Generate code to implement the COPYPRIVATE clauses. */
7911 static void
7912 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7913 omp_context *ctx)
7915 tree c;
7917 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7919 tree var, new_var, ref, x;
7920 bool by_ref;
7921 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7923 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7924 continue;
7926 var = OMP_CLAUSE_DECL (c);
7927 by_ref = use_pointer_for_field (var, NULL);
7929 ref = build_sender_ref (var, ctx);
7930 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7931 if (by_ref)
7933 x = build_fold_addr_expr_loc (clause_loc, new_var);
7934 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7936 gimplify_assign (ref, x, slist);
7938 ref = build_receiver_ref (var, false, ctx);
7939 if (by_ref)
7941 ref = fold_convert_loc (clause_loc,
7942 build_pointer_type (TREE_TYPE (new_var)),
7943 ref);
7944 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7946 if (omp_privatize_by_reference (var))
7948 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7949 ref = build_simple_mem_ref_loc (clause_loc, ref);
7950 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7952 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7953 gimplify_and_add (x, rlist);
7958 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7959 and REDUCTION from the sender (aka parent) side. */
7961 static void
7962 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7963 omp_context *ctx)
7965 tree c, t;
7966 int ignored_looptemp = 0;
7967 bool is_taskloop = false;
7969 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7970 by GOMP_taskloop. */
7971 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7973 ignored_looptemp = 2;
7974 is_taskloop = true;
7977 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7979 tree val, ref, x, var;
7980 bool by_ref, do_in = false, do_out = false;
7981 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7983 switch (OMP_CLAUSE_CODE (c))
7985 case OMP_CLAUSE_PRIVATE:
7986 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7987 break;
7988 continue;
7989 case OMP_CLAUSE_FIRSTPRIVATE:
7990 case OMP_CLAUSE_COPYIN:
7991 case OMP_CLAUSE_LASTPRIVATE:
7992 case OMP_CLAUSE_IN_REDUCTION:
7993 case OMP_CLAUSE__REDUCTEMP_:
7994 break;
7995 case OMP_CLAUSE_REDUCTION:
7996 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7997 continue;
7998 break;
7999 case OMP_CLAUSE_SHARED:
8000 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8001 break;
8002 continue;
8003 case OMP_CLAUSE__LOOPTEMP_:
8004 if (ignored_looptemp)
8006 ignored_looptemp--;
8007 continue;
8009 break;
8010 default:
8011 continue;
8014 val = OMP_CLAUSE_DECL (c);
8015 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8016 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
8017 && TREE_CODE (val) == MEM_REF)
8019 val = TREE_OPERAND (val, 0);
8020 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
8021 val = TREE_OPERAND (val, 0);
8022 if (INDIRECT_REF_P (val)
8023 || TREE_CODE (val) == ADDR_EXPR)
8024 val = TREE_OPERAND (val, 0);
8025 if (is_variable_sized (val))
8026 continue;
8029 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
8030 outer taskloop region. */
8031 omp_context *ctx_for_o = ctx;
8032 if (is_taskloop
8033 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8034 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8035 ctx_for_o = ctx->outer;
8037 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
8039 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
8040 && is_global_var (var)
8041 && (val == OMP_CLAUSE_DECL (c)
8042 || !is_task_ctx (ctx)
8043 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
8044 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
8045 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
8046 != POINTER_TYPE)))))
8047 continue;
8049 t = omp_member_access_dummy_var (var);
8050 if (t)
8052 var = DECL_VALUE_EXPR (var);
8053 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
8054 if (o != t)
8055 var = unshare_and_remap (var, t, o);
8056 else
8057 var = unshare_expr (var);
8060 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
8062 /* Handle taskloop firstprivate/lastprivate, where the
8063 lastprivate on GIMPLE_OMP_TASK is represented as
8064 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8065 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
8066 x = omp_build_component_ref (ctx->sender_decl, f);
8067 if (use_pointer_for_field (val, ctx))
8068 var = build_fold_addr_expr (var);
8069 gimplify_assign (x, var, ilist);
8070 DECL_ABSTRACT_ORIGIN (f) = NULL;
8071 continue;
8074 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8075 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
8076 || val == OMP_CLAUSE_DECL (c))
8077 && is_variable_sized (val))
8078 continue;
8079 by_ref = use_pointer_for_field (val, NULL);
8081 switch (OMP_CLAUSE_CODE (c))
8083 case OMP_CLAUSE_FIRSTPRIVATE:
8084 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
8085 && !by_ref
8086 && is_task_ctx (ctx))
8087 suppress_warning (var);
8088 do_in = true;
8089 break;
8091 case OMP_CLAUSE_PRIVATE:
8092 case OMP_CLAUSE_COPYIN:
8093 case OMP_CLAUSE__LOOPTEMP_:
8094 case OMP_CLAUSE__REDUCTEMP_:
8095 do_in = true;
8096 break;
8098 case OMP_CLAUSE_LASTPRIVATE:
8099 if (by_ref || omp_privatize_by_reference (val))
8101 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8102 continue;
8103 do_in = true;
8105 else
8107 do_out = true;
8108 if (lang_hooks.decls.omp_private_outer_ref (val))
8109 do_in = true;
8111 break;
8113 case OMP_CLAUSE_REDUCTION:
8114 case OMP_CLAUSE_IN_REDUCTION:
8115 do_in = true;
8116 if (val == OMP_CLAUSE_DECL (c))
8118 if (is_task_ctx (ctx))
8119 by_ref = use_pointer_for_field (val, ctx);
8120 else
8121 do_out = !(by_ref || omp_privatize_by_reference (val));
8123 else
8124 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
8125 break;
8127 default:
8128 gcc_unreachable ();
8131 if (do_in)
8133 ref = build_sender_ref (val, ctx);
8134 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
8135 gimplify_assign (ref, x, ilist);
8136 if (is_task_ctx (ctx))
8137 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
8140 if (do_out)
8142 ref = build_sender_ref (val, ctx);
8143 gimplify_assign (var, ref, olist);
8148 /* Generate code to implement SHARED from the sender (aka parent)
8149 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8150 list things that got automatically shared. */
8152 static void
8153 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
8155 tree var, ovar, nvar, t, f, x, record_type;
8157 if (ctx->record_type == NULL)
8158 return;
8160 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
8161 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8163 ovar = DECL_ABSTRACT_ORIGIN (f);
8164 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
8165 continue;
8167 nvar = maybe_lookup_decl (ovar, ctx);
8168 if (!nvar
8169 || !DECL_HAS_VALUE_EXPR_P (nvar)
8170 || (ctx->allocate_map
8171 && ctx->allocate_map->get (ovar)))
8172 continue;
8174 /* If CTX is a nested parallel directive. Find the immediately
8175 enclosing parallel or workshare construct that contains a
8176 mapping for OVAR. */
8177 var = lookup_decl_in_outer_ctx (ovar, ctx);
8179 t = omp_member_access_dummy_var (var);
8180 if (t)
8182 var = DECL_VALUE_EXPR (var);
8183 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
8184 if (o != t)
8185 var = unshare_and_remap (var, t, o);
8186 else
8187 var = unshare_expr (var);
8190 if (use_pointer_for_field (ovar, ctx))
8192 x = build_sender_ref (ovar, ctx);
8193 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
8194 && TREE_TYPE (f) == TREE_TYPE (ovar))
8196 gcc_assert (is_parallel_ctx (ctx)
8197 && DECL_ARTIFICIAL (ovar));
8198 /* _condtemp_ clause. */
8199 var = build_constructor (TREE_TYPE (x), NULL);
8201 else
8202 var = build_fold_addr_expr (var);
8203 gimplify_assign (x, var, ilist);
8205 else
8207 x = build_sender_ref (ovar, ctx);
8208 gimplify_assign (x, var, ilist);
8210 if (!TREE_READONLY (var)
8211 /* We don't need to receive a new reference to a result
8212 or parm decl. In fact we may not store to it as we will
8213 invalidate any pending RSO and generate wrong gimple
8214 during inlining. */
8215 && !((TREE_CODE (var) == RESULT_DECL
8216 || TREE_CODE (var) == PARM_DECL)
8217 && DECL_BY_REFERENCE (var)))
8219 x = build_sender_ref (ovar, ctx);
8220 gimplify_assign (var, x, olist);
8226 /* Emit an OpenACC head marker call, encapulating the partitioning and
8227 other information that must be processed by the target compiler.
8228 Return the maximum number of dimensions the associated loop might
8229 be partitioned over. */
8231 static unsigned
8232 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
8233 gimple_seq *seq, omp_context *ctx)
8235 unsigned levels = 0;
8236 unsigned tag = 0;
8237 tree gang_static = NULL_TREE;
8238 auto_vec<tree, 5> args;
8240 args.quick_push (build_int_cst
8241 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
8242 args.quick_push (ddvar);
8243 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8245 switch (OMP_CLAUSE_CODE (c))
8247 case OMP_CLAUSE_GANG:
8248 tag |= OLF_DIM_GANG;
8249 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
8250 /* static:* is represented by -1, and we can ignore it, as
8251 scheduling is always static. */
8252 if (gang_static && integer_minus_onep (gang_static))
8253 gang_static = NULL_TREE;
8254 levels++;
8255 break;
8257 case OMP_CLAUSE_WORKER:
8258 tag |= OLF_DIM_WORKER;
8259 levels++;
8260 break;
8262 case OMP_CLAUSE_VECTOR:
8263 tag |= OLF_DIM_VECTOR;
8264 levels++;
8265 break;
8267 case OMP_CLAUSE_SEQ:
8268 tag |= OLF_SEQ;
8269 break;
8271 case OMP_CLAUSE_AUTO:
8272 tag |= OLF_AUTO;
8273 break;
8275 case OMP_CLAUSE_INDEPENDENT:
8276 tag |= OLF_INDEPENDENT;
8277 break;
8279 case OMP_CLAUSE_TILE:
8280 tag |= OLF_TILE;
8281 break;
8283 case OMP_CLAUSE_REDUCTION:
8284 tag |= OLF_REDUCTION;
8285 break;
8287 default:
8288 continue;
8292 if (gang_static)
8294 if (DECL_P (gang_static))
8295 gang_static = build_outer_var_ref (gang_static, ctx);
8296 tag |= OLF_GANG_STATIC;
8299 omp_context *tgt = enclosing_target_ctx (ctx);
8300 if (!tgt || is_oacc_parallel_or_serial (tgt))
8302 else if (is_oacc_kernels (tgt))
8303 /* Not using this loops handling inside OpenACC 'kernels' regions. */
8304 gcc_unreachable ();
8305 else if (is_oacc_kernels_decomposed_part (tgt))
8307 else
8308 gcc_unreachable ();
8310 /* In a parallel region, loops are implicitly INDEPENDENT. */
8311 if (!tgt || is_oacc_parallel_or_serial (tgt))
8312 tag |= OLF_INDEPENDENT;
8314 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8315 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8316 if (tgt && is_oacc_kernels_decomposed_part (tgt))
8318 gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
8319 gcc_assert (!(tag & OLF_AUTO));
8322 if (tag & OLF_TILE)
8323 /* Tiling could use all 3 levels. */
8324 levels = 3;
8325 else
8327 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8328 Ensure at least one level, or 2 for possible auto
8329 partitioning */
8330 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
8331 << OLF_DIM_BASE) | OLF_SEQ));
8333 if (levels < 1u + maybe_auto)
8334 levels = 1u + maybe_auto;
8337 args.quick_push (build_int_cst (integer_type_node, levels));
8338 args.quick_push (build_int_cst (integer_type_node, tag));
8339 if (gang_static)
8340 args.quick_push (gang_static);
8342 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
8343 gimple_set_location (call, loc);
8344 gimple_set_lhs (call, ddvar);
8345 gimple_seq_add_stmt (seq, call);
8347 return levels;
8350 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
8351 partitioning level of the enclosed region. */
8353 static void
8354 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8355 tree tofollow, gimple_seq *seq)
8357 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8358 : IFN_UNIQUE_OACC_TAIL_MARK);
8359 tree marker = build_int_cst (integer_type_node, marker_kind);
8360 int nargs = 2 + (tofollow != NULL_TREE);
8361 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8362 marker, ddvar, tofollow);
8363 gimple_set_location (call, loc);
8364 gimple_set_lhs (call, ddvar);
8365 gimple_seq_add_stmt (seq, call);
8368 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8369 the loop clauses, from which we extract reductions. Initialize
8370 HEAD and TAIL. */
8372 static void
8373 lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
8374 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
8376 bool inner = false;
8377 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
8378 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
8380 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
8382 if (private_marker)
8384 gimple_set_location (private_marker, loc);
8385 gimple_call_set_lhs (private_marker, ddvar);
8386 gimple_call_set_arg (private_marker, 1, ddvar);
8389 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
8390 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
8392 gcc_assert (count);
8393 for (unsigned done = 1; count; count--, done++)
8395 gimple_seq fork_seq = NULL;
8396 gimple_seq join_seq = NULL;
8398 tree place = build_int_cst (integer_type_node, -1);
8399 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
8400 fork_kind, ddvar, place);
8401 gimple_set_location (fork, loc);
8402 gimple_set_lhs (fork, ddvar);
8404 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
8405 join_kind, ddvar, place);
8406 gimple_set_location (join, loc);
8407 gimple_set_lhs (join, ddvar);
8409 /* Mark the beginning of this level sequence. */
8410 if (inner)
8411 lower_oacc_loop_marker (loc, ddvar, true,
8412 build_int_cst (integer_type_node, count),
8413 &fork_seq);
8414 lower_oacc_loop_marker (loc, ddvar, false,
8415 build_int_cst (integer_type_node, done),
8416 &join_seq);
8418 lower_oacc_reductions (loc, clauses, place, inner,
8419 fork, (count == 1) ? private_marker : NULL,
8420 join, &fork_seq, &join_seq, ctx);
8422 /* Append this level to head. */
8423 gimple_seq_add_seq (head, fork_seq);
8424 /* Prepend it to tail. */
8425 gimple_seq_add_seq (&join_seq, *tail);
8426 *tail = join_seq;
8428 inner = true;
8431 /* Mark the end of the sequence. */
8432 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
8433 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
8436 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8437 catch handler and return it. This prevents programs from violating the
8438 structured block semantics with throws. */
8440 static gimple_seq
8441 maybe_catch_exception (gimple_seq body)
8443 gimple *g;
8444 tree decl;
8446 if (!flag_exceptions)
8447 return body;
8449 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8450 decl = lang_hooks.eh_protect_cleanup_actions ();
8451 else
8452 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8454 g = gimple_build_eh_must_not_throw (decl);
8455 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8456 GIMPLE_TRY_CATCH);
8458 return gimple_seq_alloc_with_stmt (g);
8462 /* Routines to lower OMP directives into OMP-GIMPLE. */
8464 /* If ctx is a worksharing context inside of a cancellable parallel
8465 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8466 and conditional branch to parallel's cancel_label to handle
8467 cancellation in the implicit barrier. */
8469 static void
8470 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8471 gimple_seq *body)
8473 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8474 if (gimple_omp_return_nowait_p (omp_return))
8475 return;
8476 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8477 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8478 && outer->cancellable)
8480 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8481 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8482 tree lhs = create_tmp_var (c_bool_type);
8483 gimple_omp_return_set_lhs (omp_return, lhs);
8484 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8485 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8486 fold_convert (c_bool_type,
8487 boolean_false_node),
8488 outer->cancel_label, fallthru_label);
8489 gimple_seq_add_stmt (body, g);
8490 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8492 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
8493 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
8494 return;
8497 /* Find the first task_reduction or reduction clause or return NULL
8498 if there are none. */
8500 static inline tree
8501 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8502 enum omp_clause_code ccode)
8504 while (1)
8506 clauses = omp_find_clause (clauses, ccode);
8507 if (clauses == NULL_TREE)
8508 return NULL_TREE;
8509 if (ccode != OMP_CLAUSE_REDUCTION
8510 || code == OMP_TASKLOOP
8511 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8512 return clauses;
8513 clauses = OMP_CLAUSE_CHAIN (clauses);
8517 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8518 gimple_seq *, gimple_seq *);
8520 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8521 CTX is the enclosing OMP context for the current statement. */
8523 static void
8524 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8526 tree block, control;
8527 gimple_stmt_iterator tgsi;
8528 gomp_sections *stmt;
8529 gimple *t;
8530 gbind *new_stmt, *bind;
8531 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8533 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
8535 push_gimplify_context ();
8537 dlist = NULL;
8538 ilist = NULL;
8540 tree rclauses
8541 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
8542 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
8543 tree rtmp = NULL_TREE;
8544 if (rclauses)
8546 tree type = build_pointer_type (pointer_sized_int_node);
8547 tree temp = create_tmp_var (type);
8548 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8549 OMP_CLAUSE_DECL (c) = temp;
8550 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
8551 gimple_omp_sections_set_clauses (stmt, c);
8552 lower_omp_task_reductions (ctx, OMP_SECTIONS,
8553 gimple_omp_sections_clauses (stmt),
8554 &ilist, &tred_dlist);
8555 rclauses = c;
8556 rtmp = make_ssa_name (type);
8557 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
8560 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
8561 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
8563 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
8564 &ilist, &dlist, ctx, NULL);
8566 control = create_tmp_var (unsigned_type_node, ".section");
8567 gimple_omp_sections_set_control (stmt, control);
8569 new_body = gimple_omp_body (stmt);
8570 gimple_omp_set_body (stmt, NULL);
8571 tgsi = gsi_start (new_body);
8572 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
8574 omp_context *sctx;
8575 gimple *sec_start;
8577 sec_start = gsi_stmt (tgsi);
8578 sctx = maybe_lookup_ctx (sec_start);
8579 gcc_assert (sctx);
8581 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8582 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8583 GSI_CONTINUE_LINKING);
8584 gimple_omp_set_body (sec_start, NULL);
8586 if (gsi_one_before_end_p (tgsi))
8588 gimple_seq l = NULL;
8589 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8590 &ilist, &l, &clist, ctx);
8591 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8592 gimple_omp_section_set_last (sec_start);
8595 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8596 GSI_CONTINUE_LINKING);
8599 block = make_node (BLOCK);
8600 bind = gimple_build_bind (NULL, new_body, block);
8602 olist = NULL;
8603 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8604 &clist, ctx);
8605 if (clist)
8607 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8608 gcall *g = gimple_build_call (fndecl, 0);
8609 gimple_seq_add_stmt (&olist, g);
8610 gimple_seq_add_seq (&olist, clist);
8611 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8612 g = gimple_build_call (fndecl, 0);
8613 gimple_seq_add_stmt (&olist, g);
8616 block = make_node (BLOCK);
8617 new_stmt = gimple_build_bind (NULL, NULL, block);
8618 gsi_replace (gsi_p, new_stmt, true);
8620 pop_gimplify_context (new_stmt);
8621 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8622 BLOCK_VARS (block) = gimple_bind_vars (bind);
8623 if (BLOCK_VARS (block))
8624 TREE_USED (block) = 1;
8626 new_body = NULL;
8627 gimple_seq_add_seq (&new_body, ilist);
8628 gimple_seq_add_stmt (&new_body, stmt);
8629 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8630 gimple_seq_add_stmt (&new_body, bind);
8632 t = gimple_build_omp_continue (control, control);
8633 gimple_seq_add_stmt (&new_body, t);
8635 gimple_seq_add_seq (&new_body, olist);
8636 if (ctx->cancellable)
8637 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8638 gimple_seq_add_seq (&new_body, dlist);
8640 new_body = maybe_catch_exception (new_body);
8642 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8643 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8644 t = gimple_build_omp_return (nowait);
8645 gimple_seq_add_stmt (&new_body, t);
8646 gimple_seq_add_seq (&new_body, tred_dlist);
8647 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
8649 if (rclauses)
8650 OMP_CLAUSE_DECL (rclauses) = rtmp;
8652 gimple_bind_set_body (new_stmt, new_body);
8656 /* A subroutine of lower_omp_single. Expand the simple form of
8657 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8659 if (GOMP_single_start ())
8660 BODY;
8661 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8663 FIXME. It may be better to delay expanding the logic of this until
8664 pass_expand_omp. The expanded logic may make the job more difficult
8665 to a synchronization analysis pass. */
8667 static void
8668 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8670 location_t loc = gimple_location (single_stmt);
8671 tree tlabel = create_artificial_label (loc);
8672 tree flabel = create_artificial_label (loc);
8673 gimple *call, *cond;
8674 tree lhs, decl;
8676 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8677 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8678 call = gimple_build_call (decl, 0);
8679 gimple_call_set_lhs (call, lhs);
8680 gimple_seq_add_stmt (pre_p, call);
8682 cond = gimple_build_cond (EQ_EXPR, lhs,
8683 fold_convert_loc (loc, TREE_TYPE (lhs),
8684 boolean_true_node),
8685 tlabel, flabel);
8686 gimple_seq_add_stmt (pre_p, cond);
8687 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8688 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8689 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8693 /* A subroutine of lower_omp_single. Expand the simple form of
8694 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8696 #pragma omp single copyprivate (a, b, c)
8698 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8701 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8703 BODY;
8704 copyout.a = a;
8705 copyout.b = b;
8706 copyout.c = c;
8707 GOMP_single_copy_end (&copyout);
8709 else
8711 a = copyout_p->a;
8712 b = copyout_p->b;
8713 c = copyout_p->c;
8715 GOMP_barrier ();
8718 FIXME. It may be better to delay expanding the logic of this until
8719 pass_expand_omp. The expanded logic may make the job more difficult
8720 to a synchronization analysis pass. */
8722 static void
8723 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8724 omp_context *ctx)
8726 tree ptr_type, t, l0, l1, l2, bfn_decl;
8727 gimple_seq copyin_seq;
8728 location_t loc = gimple_location (single_stmt);
8730 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8732 ptr_type = build_pointer_type (ctx->record_type);
8733 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8735 l0 = create_artificial_label (loc);
8736 l1 = create_artificial_label (loc);
8737 l2 = create_artificial_label (loc);
8739 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8740 t = build_call_expr_loc (loc, bfn_decl, 0);
8741 t = fold_convert_loc (loc, ptr_type, t);
8742 gimplify_assign (ctx->receiver_decl, t, pre_p);
8744 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8745 build_int_cst (ptr_type, 0));
8746 t = build3 (COND_EXPR, void_type_node, t,
8747 build_and_jump (&l0), build_and_jump (&l1));
8748 gimplify_and_add (t, pre_p);
8750 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8752 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8754 copyin_seq = NULL;
8755 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8756 &copyin_seq, ctx);
8758 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8759 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8760 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8761 gimplify_and_add (t, pre_p);
8763 t = build_and_jump (&l2);
8764 gimplify_and_add (t, pre_p);
8766 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8768 gimple_seq_add_seq (pre_p, copyin_seq);
8770 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8774 /* Expand code for an OpenMP single directive. */
8776 static void
8777 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8779 tree block;
8780 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8781 gbind *bind;
8782 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8784 push_gimplify_context ();
8786 block = make_node (BLOCK);
8787 bind = gimple_build_bind (NULL, NULL, block);
8788 gsi_replace (gsi_p, bind, true);
8789 bind_body = NULL;
8790 dlist = NULL;
8791 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8792 &bind_body, &dlist, ctx, NULL);
8793 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8795 gimple_seq_add_stmt (&bind_body, single_stmt);
8797 if (ctx->record_type)
8798 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8799 else
8800 lower_omp_single_simple (single_stmt, &bind_body);
8802 gimple_omp_set_body (single_stmt, NULL);
8804 gimple_seq_add_seq (&bind_body, dlist);
8806 bind_body = maybe_catch_exception (bind_body);
8808 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8809 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8810 gimple *g = gimple_build_omp_return (nowait);
8811 gimple_seq_add_stmt (&bind_body_tail, g);
8812 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8813 if (ctx->record_type)
8815 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8816 tree clobber = build_clobber (ctx->record_type);
8817 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8818 clobber), GSI_SAME_STMT);
8820 gimple_seq_add_seq (&bind_body, bind_body_tail);
8821 gimple_bind_set_body (bind, bind_body);
8823 pop_gimplify_context (bind);
8825 gimple_bind_append_vars (bind, ctx->block_vars);
8826 BLOCK_VARS (block) = ctx->block_vars;
8827 if (BLOCK_VARS (block))
8828 TREE_USED (block) = 1;
8832 /* Lower code for an OMP scope directive. */
8834 static void
8835 lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8837 tree block;
8838 gimple *scope_stmt = gsi_stmt (*gsi_p);
8839 gbind *bind;
8840 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8841 gimple_seq tred_dlist = NULL;
8843 push_gimplify_context ();
8845 block = make_node (BLOCK);
8846 bind = gimple_build_bind (NULL, NULL, block);
8847 gsi_replace (gsi_p, bind, true);
8848 bind_body = NULL;
8849 dlist = NULL;
8851 tree rclauses
8852 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
8853 OMP_SCOPE, OMP_CLAUSE_REDUCTION);
8854 if (rclauses)
8856 tree type = build_pointer_type (pointer_sized_int_node);
8857 tree temp = create_tmp_var (type);
8858 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8859 OMP_CLAUSE_DECL (c) = temp;
8860 OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
8861 gimple_omp_scope_set_clauses (scope_stmt, c);
8862 lower_omp_task_reductions (ctx, OMP_SCOPE,
8863 gimple_omp_scope_clauses (scope_stmt),
8864 &bind_body, &tred_dlist);
8865 rclauses = c;
8866 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
8867 gimple *stmt = gimple_build_call (fndecl, 1, temp);
8868 gimple_seq_add_stmt (&bind_body, stmt);
8871 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
8872 &bind_body, &dlist, ctx, NULL);
8873 lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);
8875 gimple_seq_add_stmt (&bind_body, scope_stmt);
8877 gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));
8879 gimple_omp_set_body (scope_stmt, NULL);
8881 gimple_seq clist = NULL;
8882 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
8883 &bind_body, &clist, ctx);
8884 if (clist)
8886 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8887 gcall *g = gimple_build_call (fndecl, 0);
8888 gimple_seq_add_stmt (&bind_body, g);
8889 gimple_seq_add_seq (&bind_body, clist);
8890 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8891 g = gimple_build_call (fndecl, 0);
8892 gimple_seq_add_stmt (&bind_body, g);
8895 gimple_seq_add_seq (&bind_body, dlist);
8897 bind_body = maybe_catch_exception (bind_body);
8899 bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
8900 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8901 gimple *g = gimple_build_omp_return (nowait);
8902 gimple_seq_add_stmt (&bind_body_tail, g);
8903 gimple_seq_add_seq (&bind_body_tail, tred_dlist);
8904 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8905 if (ctx->record_type)
8907 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8908 tree clobber = build_clobber (ctx->record_type);
8909 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8910 clobber), GSI_SAME_STMT);
8912 gimple_seq_add_seq (&bind_body, bind_body_tail);
8914 gimple_bind_set_body (bind, bind_body);
8916 pop_gimplify_context (bind);
8918 gimple_bind_append_vars (bind, ctx->block_vars);
8919 BLOCK_VARS (block) = ctx->block_vars;
8920 if (BLOCK_VARS (block))
8921 TREE_USED (block) = 1;
8923 /* Expand code for an OpenMP master or masked directive. */
8925 static void
8926 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8928 tree block, lab = NULL, x, bfn_decl;
8929 gimple *stmt = gsi_stmt (*gsi_p);
8930 gbind *bind;
8931 location_t loc = gimple_location (stmt);
8932 gimple_seq tseq;
8933 tree filter = integer_zero_node;
8935 push_gimplify_context ();
8937 if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
8939 filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
8940 OMP_CLAUSE_FILTER);
8941 if (filter)
8942 filter = fold_convert (integer_type_node,
8943 OMP_CLAUSE_FILTER_EXPR (filter));
8944 else
8945 filter = integer_zero_node;
8947 block = make_node (BLOCK);
8948 bind = gimple_build_bind (NULL, NULL, block);
8949 gsi_replace (gsi_p, bind, true);
8950 gimple_bind_add_stmt (bind, stmt);
8952 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8953 x = build_call_expr_loc (loc, bfn_decl, 0);
8954 x = build2 (EQ_EXPR, boolean_type_node, x, filter);
8955 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8956 tseq = NULL;
8957 gimplify_and_add (x, &tseq);
8958 gimple_bind_add_seq (bind, tseq);
8960 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8961 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8962 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8963 gimple_omp_set_body (stmt, NULL);
8965 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8967 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8969 pop_gimplify_context (bind);
8971 gimple_bind_append_vars (bind, ctx->block_vars);
8972 BLOCK_VARS (block) = ctx->block_vars;
8975 /* Helper function for lower_omp_task_reductions. For a specific PASS
8976 find out the current clause it should be processed, or return false
8977 if all have been processed already. */
8979 static inline bool
8980 omp_task_reduction_iterate (int pass, enum tree_code code,
8981 enum omp_clause_code ccode, tree *c, tree *decl,
8982 tree *type, tree *next)
8984 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8986 if (ccode == OMP_CLAUSE_REDUCTION
8987 && code != OMP_TASKLOOP
8988 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8989 continue;
8990 *decl = OMP_CLAUSE_DECL (*c);
8991 *type = TREE_TYPE (*decl);
8992 if (TREE_CODE (*decl) == MEM_REF)
8994 if (pass != 1)
8995 continue;
8997 else
8999 if (omp_privatize_by_reference (*decl))
9000 *type = TREE_TYPE (*type);
9001 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
9002 continue;
9004 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
9005 return true;
9007 *decl = NULL_TREE;
9008 *type = NULL_TREE;
9009 *next = NULL_TREE;
9010 return false;
9013 /* Lower task_reduction and reduction clauses (the latter unless CODE is
9014 OMP_TASKGROUP only with task modifier). Register mapping of those in
9015 START sequence and reducing them and unregister them in the END sequence. */
9017 static void
9018 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
9019 gimple_seq *start, gimple_seq *end)
9021 enum omp_clause_code ccode
9022 = (code == OMP_TASKGROUP
9023 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
9024 tree cancellable = NULL_TREE;
9025 clauses = omp_task_reductions_find_first (clauses, code, ccode);
9026 if (clauses == NULL_TREE)
9027 return;
9028 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9030 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
9031 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
9032 && outer->cancellable)
9034 cancellable = error_mark_node;
9035 break;
9037 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
9038 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
9039 break;
9041 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
9042 tree *last = &TYPE_FIELDS (record_type);
9043 unsigned cnt = 0;
9044 if (cancellable)
9046 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9047 ptr_type_node);
9048 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9049 integer_type_node);
9050 *last = field;
9051 DECL_CHAIN (field) = ifield;
9052 last = &DECL_CHAIN (ifield);
9053 DECL_CONTEXT (field) = record_type;
9054 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9055 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9056 DECL_CONTEXT (ifield) = record_type;
9057 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
9058 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
9060 for (int pass = 0; pass < 2; pass++)
9062 tree decl, type, next;
9063 for (tree c = clauses;
9064 omp_task_reduction_iterate (pass, code, ccode,
9065 &c, &decl, &type, &next); c = next)
9067 ++cnt;
9068 tree new_type = type;
9069 if (ctx->outer)
9070 new_type = remap_type (type, &ctx->outer->cb);
9071 tree field
9072 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
9073 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
9074 new_type);
9075 if (DECL_P (decl) && type == TREE_TYPE (decl))
9077 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
9078 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
9079 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
9081 else
9082 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
9083 DECL_CONTEXT (field) = record_type;
9084 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9085 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9086 *last = field;
9087 last = &DECL_CHAIN (field);
9088 tree bfield
9089 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
9090 boolean_type_node);
9091 DECL_CONTEXT (bfield) = record_type;
9092 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
9093 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
9094 *last = bfield;
9095 last = &DECL_CHAIN (bfield);
9098 *last = NULL_TREE;
9099 layout_type (record_type);
9101 /* Build up an array which registers with the runtime all the reductions
9102 and deregisters them at the end. Format documented in libgomp/task.c. */
9103 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
9104 tree avar = create_tmp_var_raw (atype);
9105 gimple_add_tmp_var (avar);
9106 TREE_ADDRESSABLE (avar) = 1;
9107 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
9108 NULL_TREE, NULL_TREE);
9109 tree t = build_int_cst (pointer_sized_int_node, cnt);
9110 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9111 gimple_seq seq = NULL;
9112 tree sz = fold_convert (pointer_sized_int_node,
9113 TYPE_SIZE_UNIT (record_type));
9114 int cachesz = 64;
9115 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
9116 build_int_cst (pointer_sized_int_node, cachesz - 1));
9117 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
9118 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
9119 ctx->task_reductions.create (1 + cnt);
9120 ctx->task_reduction_map = new hash_map<tree, unsigned>;
9121 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
9122 ? sz : NULL_TREE);
9123 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
9124 gimple_seq_add_seq (start, seq);
9125 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
9126 NULL_TREE, NULL_TREE);
9127 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
9128 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9129 NULL_TREE, NULL_TREE);
9130 t = build_int_cst (pointer_sized_int_node,
9131 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
9132 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9133 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
9134 NULL_TREE, NULL_TREE);
9135 t = build_int_cst (pointer_sized_int_node, -1);
9136 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9137 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
9138 NULL_TREE, NULL_TREE);
9139 t = build_int_cst (pointer_sized_int_node, 0);
9140 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9142 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
9143 and for each task reduction checks a bool right after the private variable
9144 within that thread's chunk; if the bool is clear, it hasn't been
9145 initialized and thus isn't going to be reduced nor destructed, otherwise
9146 reduce and destruct it. */
9147 tree idx = create_tmp_var (size_type_node);
9148 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
9149 tree num_thr_sz = create_tmp_var (size_type_node);
9150 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9151 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9152 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
9153 gimple *g;
9154 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9156 /* For worksharing constructs or scope, only perform it in the master
9157 thread, with the exception of cancelled implicit barriers - then only
9158 handle the current thread. */
9159 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9160 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9161 tree thr_num = create_tmp_var (integer_type_node);
9162 g = gimple_build_call (t, 0);
9163 gimple_call_set_lhs (g, thr_num);
9164 gimple_seq_add_stmt (end, g);
9165 if (cancellable)
9167 tree c;
9168 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9169 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9170 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9171 if (code == OMP_FOR)
9172 c = gimple_omp_for_clauses (ctx->stmt);
9173 else if (code == OMP_SECTIONS)
9174 c = gimple_omp_sections_clauses (ctx->stmt);
9175 else /* if (code == OMP_SCOPE) */
9176 c = gimple_omp_scope_clauses (ctx->stmt);
9177 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
9178 cancellable = c;
9179 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
9180 lab5, lab6);
9181 gimple_seq_add_stmt (end, g);
9182 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9183 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
9184 gimple_seq_add_stmt (end, g);
9185 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
9186 build_one_cst (TREE_TYPE (idx)));
9187 gimple_seq_add_stmt (end, g);
9188 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
9189 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9191 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
9192 gimple_seq_add_stmt (end, g);
9193 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9195 if (code != OMP_PARALLEL)
9197 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9198 tree num_thr = create_tmp_var (integer_type_node);
9199 g = gimple_build_call (t, 0);
9200 gimple_call_set_lhs (g, num_thr);
9201 gimple_seq_add_stmt (end, g);
9202 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
9203 gimple_seq_add_stmt (end, g);
9204 if (cancellable)
9205 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9207 else
9209 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
9210 OMP_CLAUSE__REDUCTEMP_);
9211 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
9212 t = fold_convert (size_type_node, t);
9213 gimplify_assign (num_thr_sz, t, end);
9215 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9216 NULL_TREE, NULL_TREE);
9217 tree data = create_tmp_var (pointer_sized_int_node);
9218 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
9219 if (code == OMP_TASKLOOP)
9221 lab7 = create_artificial_label (UNKNOWN_LOCATION);
9222 g = gimple_build_cond (NE_EXPR, data,
9223 build_zero_cst (pointer_sized_int_node),
9224 lab1, lab7);
9225 gimple_seq_add_stmt (end, g);
9227 gimple_seq_add_stmt (end, gimple_build_label (lab1));
9228 tree ptr;
9229 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
9230 ptr = create_tmp_var (build_pointer_type (record_type));
9231 else
9232 ptr = create_tmp_var (ptr_type_node);
9233 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
9235 tree field = TYPE_FIELDS (record_type);
9236 cnt = 0;
9237 if (cancellable)
9238 field = DECL_CHAIN (DECL_CHAIN (field));
9239 for (int pass = 0; pass < 2; pass++)
9241 tree decl, type, next;
9242 for (tree c = clauses;
9243 omp_task_reduction_iterate (pass, code, ccode,
9244 &c, &decl, &type, &next); c = next)
9246 tree var = decl, ref;
9247 if (TREE_CODE (decl) == MEM_REF)
9249 var = TREE_OPERAND (var, 0);
9250 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
9251 var = TREE_OPERAND (var, 0);
9252 tree v = var;
9253 if (TREE_CODE (var) == ADDR_EXPR)
9254 var = TREE_OPERAND (var, 0);
9255 else if (INDIRECT_REF_P (var))
9256 var = TREE_OPERAND (var, 0);
9257 tree orig_var = var;
9258 if (is_variable_sized (var))
9260 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
9261 var = DECL_VALUE_EXPR (var);
9262 gcc_assert (INDIRECT_REF_P (var));
9263 var = TREE_OPERAND (var, 0);
9264 gcc_assert (DECL_P (var));
9266 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9267 if (orig_var != var)
9268 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
9269 else if (TREE_CODE (v) == ADDR_EXPR)
9270 t = build_fold_addr_expr (t);
9271 else if (INDIRECT_REF_P (v))
9272 t = build_fold_indirect_ref (t);
9273 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
9275 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
9276 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
9277 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
9279 if (!integer_zerop (TREE_OPERAND (decl, 1)))
9280 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
9281 fold_convert (size_type_node,
9282 TREE_OPERAND (decl, 1)));
9284 else
9286 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9287 if (!omp_privatize_by_reference (decl))
9288 t = build_fold_addr_expr (t);
9290 t = fold_convert (pointer_sized_int_node, t);
9291 seq = NULL;
9292 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9293 gimple_seq_add_seq (start, seq);
9294 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9295 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9296 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9297 t = unshare_expr (byte_position (field));
9298 t = fold_convert (pointer_sized_int_node, t);
9299 ctx->task_reduction_map->put (c, cnt);
9300 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
9301 ? t : NULL_TREE);
9302 seq = NULL;
9303 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9304 gimple_seq_add_seq (start, seq);
9305 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9306 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
9307 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9309 tree bfield = DECL_CHAIN (field);
9310 tree cond;
9311 if (code == OMP_PARALLEL
9312 || code == OMP_FOR
9313 || code == OMP_SECTIONS
9314 || code == OMP_SCOPE)
9315 /* In parallel, worksharing or scope all threads unconditionally
9316 initialize all their task reduction private variables. */
9317 cond = boolean_true_node;
9318 else if (TREE_TYPE (ptr) == ptr_type_node)
9320 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9321 unshare_expr (byte_position (bfield)));
9322 seq = NULL;
9323 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
9324 gimple_seq_add_seq (end, seq);
9325 tree pbool = build_pointer_type (TREE_TYPE (bfield));
9326 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
9327 build_int_cst (pbool, 0));
9329 else
9330 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
9331 build_simple_mem_ref (ptr), bfield, NULL_TREE);
9332 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9333 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9334 tree condv = create_tmp_var (boolean_type_node);
9335 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
9336 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
9337 lab3, lab4);
9338 gimple_seq_add_stmt (end, g);
9339 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9340 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
9342 /* If this reduction doesn't need destruction and parallel
9343 has been cancelled, there is nothing to do for this
9344 reduction, so jump around the merge operation. */
9345 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9346 g = gimple_build_cond (NE_EXPR, cancellable,
9347 build_zero_cst (TREE_TYPE (cancellable)),
9348 lab4, lab5);
9349 gimple_seq_add_stmt (end, g);
9350 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9353 tree new_var;
9354 if (TREE_TYPE (ptr) == ptr_type_node)
9356 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9357 unshare_expr (byte_position (field)));
9358 seq = NULL;
9359 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9360 gimple_seq_add_seq (end, seq);
9361 tree pbool = build_pointer_type (TREE_TYPE (field));
9362 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9363 build_int_cst (pbool, 0));
9365 else
9366 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9367 build_simple_mem_ref (ptr), field, NULL_TREE);
9369 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9370 if (TREE_CODE (decl) != MEM_REF
9371 && omp_privatize_by_reference (decl))
9372 ref = build_simple_mem_ref (ref);
9373 /* reduction(-:var) sums up the partial results, so it acts
9374 identically to reduction(+:var). */
9375 if (rcode == MINUS_EXPR)
9376 rcode = PLUS_EXPR;
9377 if (TREE_CODE (decl) == MEM_REF)
9379 tree type = TREE_TYPE (new_var);
9380 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9381 tree i = create_tmp_var (TREE_TYPE (v));
9382 tree ptype = build_pointer_type (TREE_TYPE (type));
9383 if (DECL_P (v))
9385 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9386 tree vv = create_tmp_var (TREE_TYPE (v));
9387 gimplify_assign (vv, v, start);
9388 v = vv;
9390 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9391 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9392 new_var = build_fold_addr_expr (new_var);
9393 new_var = fold_convert (ptype, new_var);
9394 ref = fold_convert (ptype, ref);
9395 tree m = create_tmp_var (ptype);
9396 gimplify_assign (m, new_var, end);
9397 new_var = m;
9398 m = create_tmp_var (ptype);
9399 gimplify_assign (m, ref, end);
9400 ref = m;
9401 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9402 tree body = create_artificial_label (UNKNOWN_LOCATION);
9403 tree endl = create_artificial_label (UNKNOWN_LOCATION);
9404 gimple_seq_add_stmt (end, gimple_build_label (body));
9405 tree priv = build_simple_mem_ref (new_var);
9406 tree out = build_simple_mem_ref (ref);
9407 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9409 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9410 tree decl_placeholder
9411 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9412 tree lab6 = NULL_TREE;
9413 if (cancellable)
9415 /* If this reduction needs destruction and parallel
9416 has been cancelled, jump around the merge operation
9417 to the destruction. */
9418 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9419 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9420 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9421 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9422 lab6, lab5);
9423 gimple_seq_add_stmt (end, g);
9424 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9426 SET_DECL_VALUE_EXPR (placeholder, out);
9427 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9428 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9429 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9430 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9431 gimple_seq_add_seq (end,
9432 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9433 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9434 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9436 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9437 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9439 if (cancellable)
9440 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9441 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9442 if (x)
9444 gimple_seq tseq = NULL;
9445 gimplify_stmt (&x, &tseq);
9446 gimple_seq_add_seq (end, tseq);
9449 else
9451 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9452 out = unshare_expr (out);
9453 gimplify_assign (out, x, end);
9455 gimple *g
9456 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9457 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9458 gimple_seq_add_stmt (end, g);
9459 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9460 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9461 gimple_seq_add_stmt (end, g);
9462 g = gimple_build_assign (i, PLUS_EXPR, i,
9463 build_int_cst (TREE_TYPE (i), 1));
9464 gimple_seq_add_stmt (end, g);
9465 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9466 gimple_seq_add_stmt (end, g);
9467 gimple_seq_add_stmt (end, gimple_build_label (endl));
9469 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9471 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9472 tree oldv = NULL_TREE;
9473 tree lab6 = NULL_TREE;
9474 if (cancellable)
9476 /* If this reduction needs destruction and parallel
9477 has been cancelled, jump around the merge operation
9478 to the destruction. */
9479 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9480 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9481 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9482 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9483 lab6, lab5);
9484 gimple_seq_add_stmt (end, g);
9485 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9487 if (omp_privatize_by_reference (decl)
9488 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9489 TREE_TYPE (ref)))
9490 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9491 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9492 tree refv = create_tmp_var (TREE_TYPE (ref));
9493 gimplify_assign (refv, ref, end);
9494 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9495 SET_DECL_VALUE_EXPR (placeholder, ref);
9496 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9497 tree d = maybe_lookup_decl (decl, ctx);
9498 gcc_assert (d);
9499 if (DECL_HAS_VALUE_EXPR_P (d))
9500 oldv = DECL_VALUE_EXPR (d);
9501 if (omp_privatize_by_reference (var))
9503 tree v = fold_convert (TREE_TYPE (d),
9504 build_fold_addr_expr (new_var));
9505 SET_DECL_VALUE_EXPR (d, v);
9507 else
9508 SET_DECL_VALUE_EXPR (d, new_var);
9509 DECL_HAS_VALUE_EXPR_P (d) = 1;
9510 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9511 if (oldv)
9512 SET_DECL_VALUE_EXPR (d, oldv);
9513 else
9515 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9516 DECL_HAS_VALUE_EXPR_P (d) = 0;
9518 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9519 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9520 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9521 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9522 if (cancellable)
9523 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9524 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9525 if (x)
9527 gimple_seq tseq = NULL;
9528 gimplify_stmt (&x, &tseq);
9529 gimple_seq_add_seq (end, tseq);
9532 else
9534 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9535 ref = unshare_expr (ref);
9536 gimplify_assign (ref, x, end);
9538 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9539 ++cnt;
9540 field = DECL_CHAIN (bfield);
9544 if (code == OMP_TASKGROUP)
9546 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9547 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9548 gimple_seq_add_stmt (start, g);
9550 else
9552 tree c;
9553 if (code == OMP_FOR)
9554 c = gimple_omp_for_clauses (ctx->stmt);
9555 else if (code == OMP_SECTIONS)
9556 c = gimple_omp_sections_clauses (ctx->stmt);
9557 else if (code == OMP_SCOPE)
9558 c = gimple_omp_scope_clauses (ctx->stmt);
9559 else
9560 c = gimple_omp_taskreg_clauses (ctx->stmt);
9561 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9562 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9563 build_fold_addr_expr (avar));
9564 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9567 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9568 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9569 size_one_node));
9570 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9571 gimple_seq_add_stmt (end, g);
9572 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9573 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9575 enum built_in_function bfn
9576 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9577 t = builtin_decl_explicit (bfn);
9578 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9579 tree arg;
9580 if (cancellable)
9582 arg = create_tmp_var (c_bool_type);
9583 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9584 cancellable));
9586 else
9587 arg = build_int_cst (c_bool_type, 0);
9588 g = gimple_build_call (t, 1, arg);
9590 else
9592 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9593 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9595 gimple_seq_add_stmt (end, g);
9596 if (lab7)
9597 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9598 t = build_constructor (atype, NULL);
9599 TREE_THIS_VOLATILE (t) = 1;
9600 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9603 /* Expand code for an OpenMP taskgroup directive. */
9605 static void
9606 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9608 gimple *stmt = gsi_stmt (*gsi_p);
9609 gcall *x;
9610 gbind *bind;
9611 gimple_seq dseq = NULL;
9612 tree block = make_node (BLOCK);
9614 bind = gimple_build_bind (NULL, NULL, block);
9615 gsi_replace (gsi_p, bind, true);
9616 gimple_bind_add_stmt (bind, stmt);
9618 push_gimplify_context ();
9620 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9622 gimple_bind_add_stmt (bind, x);
9624 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9625 gimple_omp_taskgroup_clauses (stmt),
9626 gimple_bind_body_ptr (bind), &dseq);
9628 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9629 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9630 gimple_omp_set_body (stmt, NULL);
9632 gimple_bind_add_seq (bind, dseq);
9634 pop_gimplify_context (bind);
9636 gimple_bind_append_vars (bind, ctx->block_vars);
9637 BLOCK_VARS (block) = ctx->block_vars;
9641 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9643 static void
9644 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9645 omp_context *ctx)
9647 struct omp_for_data fd;
9648 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9649 return;
9651 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9652 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9653 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9654 if (!fd.ordered)
9655 return;
9657 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9658 tree c = gimple_omp_ordered_clauses (ord_stmt);
9659 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
9660 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
9662 /* Merge depend clauses from multiple adjacent
9663 #pragma omp ordered depend(sink:...) constructs
9664 into one #pragma omp ordered depend(sink:...), so that
9665 we can optimize them together. */
9666 gimple_stmt_iterator gsi = *gsi_p;
9667 gsi_next (&gsi);
9668 while (!gsi_end_p (gsi))
9670 gimple *stmt = gsi_stmt (gsi);
9671 if (is_gimple_debug (stmt)
9672 || gimple_code (stmt) == GIMPLE_NOP)
9674 gsi_next (&gsi);
9675 continue;
9677 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9678 break;
9679 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9680 c = gimple_omp_ordered_clauses (ord_stmt2);
9681 if (c == NULL_TREE
9682 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS
9683 || OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
9684 break;
9685 while (*list_p)
9686 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9687 *list_p = c;
9688 gsi_remove (&gsi, true);
9692 /* Canonicalize sink dependence clauses into one folded clause if
9693 possible.
9695 The basic algorithm is to create a sink vector whose first
9696 element is the GCD of all the first elements, and whose remaining
9697 elements are the minimum of the subsequent columns.
9699 We ignore dependence vectors whose first element is zero because
9700 such dependencies are known to be executed by the same thread.
9702 We take into account the direction of the loop, so a minimum
9703 becomes a maximum if the loop is iterating forwards. We also
9704 ignore sink clauses where the loop direction is unknown, or where
9705 the offsets are clearly invalid because they are not a multiple
9706 of the loop increment.
9708 For example:
9710 #pragma omp for ordered(2)
9711 for (i=0; i < N; ++i)
9712 for (j=0; j < M; ++j)
9714 #pragma omp ordered \
9715 depend(sink:i-8,j-2) \
9716 depend(sink:i,j-1) \ // Completely ignored because i+0.
9717 depend(sink:i-4,j-3) \
9718 depend(sink:i-6,j-4)
9719 #pragma omp ordered depend(source)
9722 Folded clause is:
9724 depend(sink:-gcd(8,4,6),-min(2,3,4))
9725 -or-
9726 depend(sink:-2,-2)
9729 /* FIXME: Computing GCD's where the first element is zero is
9730 non-trivial in the presence of collapsed loops. Do this later. */
9731 if (fd.collapse > 1)
9732 return;
9734 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9736 /* wide_int is not a POD so it must be default-constructed. */
9737 for (unsigned i = 0; i != 2 * len - 1; ++i)
9738 new (static_cast<void*>(folded_deps + i)) wide_int ();
9740 tree folded_dep = NULL_TREE;
9741 /* TRUE if the first dimension's offset is negative. */
9742 bool neg_offset_p = false;
9744 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9745 unsigned int i;
9746 while ((c = *list_p) != NULL)
9748 bool remove = false;
9750 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS);
9751 if (OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
9752 goto next_ordered_clause;
9754 tree vec;
9755 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9756 vec && TREE_CODE (vec) == TREE_LIST;
9757 vec = TREE_CHAIN (vec), ++i)
9759 gcc_assert (i < len);
9761 /* omp_extract_for_data has canonicalized the condition. */
9762 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9763 || fd.loops[i].cond_code == GT_EXPR);
9764 bool forward = fd.loops[i].cond_code == LT_EXPR;
9765 bool maybe_lexically_later = true;
9767 /* While the committee makes up its mind, bail if we have any
9768 non-constant steps. */
9769 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9770 goto lower_omp_ordered_ret;
9772 tree itype = TREE_TYPE (TREE_VALUE (vec));
9773 if (POINTER_TYPE_P (itype))
9774 itype = sizetype;
9775 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9776 TYPE_PRECISION (itype),
9777 TYPE_SIGN (itype));
9779 /* Ignore invalid offsets that are not multiples of the step. */
9780 if (!wi::multiple_of_p (wi::abs (offset),
9781 wi::abs (wi::to_wide (fd.loops[i].step)),
9782 UNSIGNED))
9784 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9785 "ignoring sink clause with offset that is not "
9786 "a multiple of the loop step");
9787 remove = true;
9788 goto next_ordered_clause;
9791 /* Calculate the first dimension. The first dimension of
9792 the folded dependency vector is the GCD of the first
9793 elements, while ignoring any first elements whose offset
9794 is 0. */
9795 if (i == 0)
9797 /* Ignore dependence vectors whose first dimension is 0. */
9798 if (offset == 0)
9800 remove = true;
9801 goto next_ordered_clause;
9803 else
9805 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9807 error_at (OMP_CLAUSE_LOCATION (c),
9808 "first offset must be in opposite direction "
9809 "of loop iterations");
9810 goto lower_omp_ordered_ret;
9812 if (forward)
9813 offset = -offset;
9814 neg_offset_p = forward;
9815 /* Initialize the first time around. */
9816 if (folded_dep == NULL_TREE)
9818 folded_dep = c;
9819 folded_deps[0] = offset;
9821 else
9822 folded_deps[0] = wi::gcd (folded_deps[0],
9823 offset, UNSIGNED);
9826 /* Calculate minimum for the remaining dimensions. */
9827 else
9829 folded_deps[len + i - 1] = offset;
9830 if (folded_dep == c)
9831 folded_deps[i] = offset;
9832 else if (maybe_lexically_later
9833 && !wi::eq_p (folded_deps[i], offset))
9835 if (forward ^ wi::gts_p (folded_deps[i], offset))
9837 unsigned int j;
9838 folded_dep = c;
9839 for (j = 1; j <= i; j++)
9840 folded_deps[j] = folded_deps[len + j - 1];
9842 else
9843 maybe_lexically_later = false;
9847 gcc_assert (i == len);
9849 remove = true;
9851 next_ordered_clause:
9852 if (remove)
9853 *list_p = OMP_CLAUSE_CHAIN (c);
9854 else
9855 list_p = &OMP_CLAUSE_CHAIN (c);
9858 if (folded_dep)
9860 if (neg_offset_p)
9861 folded_deps[0] = -folded_deps[0];
9863 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9864 if (POINTER_TYPE_P (itype))
9865 itype = sizetype;
9867 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9868 = wide_int_to_tree (itype, folded_deps[0]);
9869 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9870 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9873 lower_omp_ordered_ret:
9875 /* Ordered without clauses is #pragma omp threads, while we want
9876 a nop instead if we remove all clauses. */
9877 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9878 gsi_replace (gsi_p, gimple_build_nop (), true);
9882 /* Expand code for an OpenMP ordered directive. */
9884 static void
9885 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9887 tree block;
9888 gimple *stmt = gsi_stmt (*gsi_p), *g;
9889 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9890 gcall *x;
9891 gbind *bind;
9892 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9893 OMP_CLAUSE_SIMD);
9894 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9895 loop. */
9896 bool maybe_simt
9897 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9898 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9899 OMP_CLAUSE_THREADS);
9901 if (gimple_omp_ordered_standalone_p (ord_stmt))
9903 /* FIXME: This is needs to be moved to the expansion to verify various
9904 conditions only testable on cfg with dominators computed, and also
9905 all the depend clauses to be merged still might need to be available
9906 for the runtime checks. */
9907 if (0)
9908 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9909 return;
9912 push_gimplify_context ();
9914 block = make_node (BLOCK);
9915 bind = gimple_build_bind (NULL, NULL, block);
9916 gsi_replace (gsi_p, bind, true);
9917 gimple_bind_add_stmt (bind, stmt);
9919 if (simd)
9921 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9922 build_int_cst (NULL_TREE, threads));
9923 cfun->has_simduid_loops = true;
9925 else
9926 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9928 gimple_bind_add_stmt (bind, x);
9930 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9931 if (maybe_simt)
9933 counter = create_tmp_var (integer_type_node);
9934 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9935 gimple_call_set_lhs (g, counter);
9936 gimple_bind_add_stmt (bind, g);
9938 body = create_artificial_label (UNKNOWN_LOCATION);
9939 test = create_artificial_label (UNKNOWN_LOCATION);
9940 gimple_bind_add_stmt (bind, gimple_build_label (body));
9942 tree simt_pred = create_tmp_var (integer_type_node);
9943 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9944 gimple_call_set_lhs (g, simt_pred);
9945 gimple_bind_add_stmt (bind, g);
9947 tree t = create_artificial_label (UNKNOWN_LOCATION);
9948 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9949 gimple_bind_add_stmt (bind, g);
9951 gimple_bind_add_stmt (bind, gimple_build_label (t));
9953 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9954 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9955 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9956 gimple_omp_set_body (stmt, NULL);
9958 if (maybe_simt)
9960 gimple_bind_add_stmt (bind, gimple_build_label (test));
9961 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9962 gimple_bind_add_stmt (bind, g);
9964 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9965 tree nonneg = create_tmp_var (integer_type_node);
9966 gimple_seq tseq = NULL;
9967 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9968 gimple_bind_add_seq (bind, tseq);
9970 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9971 gimple_call_set_lhs (g, nonneg);
9972 gimple_bind_add_stmt (bind, g);
9974 tree end = create_artificial_label (UNKNOWN_LOCATION);
9975 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9976 gimple_bind_add_stmt (bind, g);
9978 gimple_bind_add_stmt (bind, gimple_build_label (end));
9980 if (simd)
9981 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9982 build_int_cst (NULL_TREE, threads));
9983 else
9984 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9986 gimple_bind_add_stmt (bind, x);
9988 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9990 pop_gimplify_context (bind);
9992 gimple_bind_append_vars (bind, ctx->block_vars);
9993 BLOCK_VARS (block) = gimple_bind_vars (bind);
9997 /* Expand code for an OpenMP scan directive and the structured block
9998 before the scan directive. */
10000 static void
10001 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10003 gimple *stmt = gsi_stmt (*gsi_p);
10004 bool has_clauses
10005 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
10006 tree lane = NULL_TREE;
10007 gimple_seq before = NULL;
10008 omp_context *octx = ctx->outer;
10009 gcc_assert (octx);
10010 if (octx->scan_exclusive && !has_clauses)
10012 gimple_stmt_iterator gsi2 = *gsi_p;
10013 gsi_next (&gsi2);
10014 gimple *stmt2 = gsi_stmt (gsi2);
10015 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
10016 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
10017 the one with exclusive clause(s), comes first. */
10018 if (stmt2
10019 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
10020 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
10022 gsi_remove (gsi_p, false);
10023 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
10024 ctx = maybe_lookup_ctx (stmt2);
10025 gcc_assert (ctx);
10026 lower_omp_scan (gsi_p, ctx);
10027 return;
10031 bool input_phase = has_clauses ^ octx->scan_inclusive;
10032 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10033 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
10034 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10035 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
10036 && !gimple_omp_for_combined_p (octx->stmt));
10037 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
10038 if (is_for_simd && octx->for_simd_scan_phase)
10039 is_simd = false;
10040 if (is_simd)
10041 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
10042 OMP_CLAUSE__SIMDUID_))
10044 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
10045 lane = create_tmp_var (unsigned_type_node);
10046 tree t = build_int_cst (integer_type_node,
10047 input_phase ? 1
10048 : octx->scan_inclusive ? 2 : 3);
10049 gimple *g
10050 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
10051 gimple_call_set_lhs (g, lane);
10052 gimple_seq_add_stmt (&before, g);
10055 if (is_simd || is_for)
10057 for (tree c = gimple_omp_for_clauses (octx->stmt);
10058 c; c = OMP_CLAUSE_CHAIN (c))
10059 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10060 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10062 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10063 tree var = OMP_CLAUSE_DECL (c);
10064 tree new_var = lookup_decl (var, octx);
10065 tree val = new_var;
10066 tree var2 = NULL_TREE;
10067 tree var3 = NULL_TREE;
10068 tree var4 = NULL_TREE;
10069 tree lane0 = NULL_TREE;
10070 tree new_vard = new_var;
10071 if (omp_privatize_by_reference (var))
10073 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10074 val = new_var;
10076 if (DECL_HAS_VALUE_EXPR_P (new_vard))
10078 val = DECL_VALUE_EXPR (new_vard);
10079 if (new_vard != new_var)
10081 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
10082 val = TREE_OPERAND (val, 0);
10084 if (TREE_CODE (val) == ARRAY_REF
10085 && VAR_P (TREE_OPERAND (val, 0)))
10087 tree v = TREE_OPERAND (val, 0);
10088 if (lookup_attribute ("omp simd array",
10089 DECL_ATTRIBUTES (v)))
10091 val = unshare_expr (val);
10092 lane0 = TREE_OPERAND (val, 1);
10093 TREE_OPERAND (val, 1) = lane;
10094 var2 = lookup_decl (v, octx);
10095 if (octx->scan_exclusive)
10096 var4 = lookup_decl (var2, octx);
10097 if (input_phase
10098 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10099 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
10100 if (!input_phase)
10102 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
10103 var2, lane, NULL_TREE, NULL_TREE);
10104 TREE_THIS_NOTRAP (var2) = 1;
10105 if (octx->scan_exclusive)
10107 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
10108 var4, lane, NULL_TREE,
10109 NULL_TREE);
10110 TREE_THIS_NOTRAP (var4) = 1;
10113 else
10114 var2 = val;
10117 gcc_assert (var2);
10119 else
10121 var2 = build_outer_var_ref (var, octx);
10122 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10124 var3 = maybe_lookup_decl (new_vard, octx);
10125 if (var3 == new_vard || var3 == NULL_TREE)
10126 var3 = NULL_TREE;
10127 else if (is_simd && octx->scan_exclusive && !input_phase)
10129 var4 = maybe_lookup_decl (var3, octx);
10130 if (var4 == var3 || var4 == NULL_TREE)
10132 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
10134 var4 = var3;
10135 var3 = NULL_TREE;
10137 else
10138 var4 = NULL_TREE;
10142 if (is_simd
10143 && octx->scan_exclusive
10144 && !input_phase
10145 && var4 == NULL_TREE)
10146 var4 = create_tmp_var (TREE_TYPE (val));
10148 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10150 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10151 if (input_phase)
10153 if (var3)
10155 /* If we've added a separate identity element
10156 variable, copy it over into val. */
10157 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
10158 var3);
10159 gimplify_and_add (x, &before);
10161 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10163 /* Otherwise, assign to it the identity element. */
10164 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10165 if (is_for)
10166 tseq = copy_gimple_seq_and_replace_locals (tseq);
10167 tree ref = build_outer_var_ref (var, octx);
10168 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10169 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10170 if (x)
10172 if (new_vard != new_var)
10173 val = build_fold_addr_expr_loc (clause_loc, val);
10174 SET_DECL_VALUE_EXPR (new_vard, val);
10176 SET_DECL_VALUE_EXPR (placeholder, ref);
10177 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10178 lower_omp (&tseq, octx);
10179 if (x)
10180 SET_DECL_VALUE_EXPR (new_vard, x);
10181 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10182 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10183 gimple_seq_add_seq (&before, tseq);
10184 if (is_simd)
10185 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10188 else if (is_simd)
10190 tree x;
10191 if (octx->scan_exclusive)
10193 tree v4 = unshare_expr (var4);
10194 tree v2 = unshare_expr (var2);
10195 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
10196 gimplify_and_add (x, &before);
10198 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10199 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10200 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10201 tree vexpr = val;
10202 if (x && new_vard != new_var)
10203 vexpr = build_fold_addr_expr_loc (clause_loc, val);
10204 if (x)
10205 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10206 SET_DECL_VALUE_EXPR (placeholder, var2);
10207 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10208 lower_omp (&tseq, octx);
10209 gimple_seq_add_seq (&before, tseq);
10210 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10211 if (x)
10212 SET_DECL_VALUE_EXPR (new_vard, x);
10213 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10214 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10215 if (octx->scan_inclusive)
10217 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10218 var2);
10219 gimplify_and_add (x, &before);
10221 else if (lane0 == NULL_TREE)
10223 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10224 var4);
10225 gimplify_and_add (x, &before);
10229 else
10231 if (input_phase)
10233 /* input phase. Set val to initializer before
10234 the body. */
10235 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
10236 gimplify_assign (val, x, &before);
10238 else if (is_simd)
10240 /* scan phase. */
10241 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10242 if (code == MINUS_EXPR)
10243 code = PLUS_EXPR;
10245 tree x = build2 (code, TREE_TYPE (var2),
10246 unshare_expr (var2), unshare_expr (val));
10247 if (octx->scan_inclusive)
10249 gimplify_assign (unshare_expr (var2), x, &before);
10250 gimplify_assign (val, var2, &before);
10252 else
10254 gimplify_assign (unshare_expr (var4),
10255 unshare_expr (var2), &before);
10256 gimplify_assign (var2, x, &before);
10257 if (lane0 == NULL_TREE)
10258 gimplify_assign (val, var4, &before);
10262 if (octx->scan_exclusive && !input_phase && lane0)
10264 tree vexpr = unshare_expr (var4);
10265 TREE_OPERAND (vexpr, 1) = lane0;
10266 if (new_vard != new_var)
10267 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
10268 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10272 if (is_simd && !is_for_simd)
10274 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
10275 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
10276 gsi_replace (gsi_p, gimple_build_nop (), true);
10277 return;
10279 lower_omp (gimple_omp_body_ptr (stmt), octx);
10280 if (before)
10282 gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (stmt));
10283 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
10288 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10289 substitution of a couple of function calls. But in the NAMED case,
10290 requires that languages coordinate a symbol name. It is therefore
10291 best put here in common code. */
10293 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
10295 static void
10296 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10298 tree block;
10299 tree name, lock, unlock;
10300 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10301 gbind *bind;
10302 location_t loc = gimple_location (stmt);
10303 gimple_seq tbody;
10305 name = gimple_omp_critical_name (stmt);
10306 if (name)
10308 tree decl;
10310 if (!critical_name_mutexes)
10311 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10313 tree *n = critical_name_mutexes->get (name);
10314 if (n == NULL)
10316 char *new_str;
10318 decl = create_tmp_var_raw (ptr_type_node);
10320 new_str = ACONCAT ((".gomp_critical_user_",
10321 IDENTIFIER_POINTER (name), NULL));
10322 DECL_NAME (decl) = get_identifier (new_str);
10323 TREE_PUBLIC (decl) = 1;
10324 TREE_STATIC (decl) = 1;
10325 DECL_COMMON (decl) = 1;
10326 DECL_ARTIFICIAL (decl) = 1;
10327 DECL_IGNORED_P (decl) = 1;
10329 varpool_node::finalize_decl (decl);
10331 critical_name_mutexes->put (name, decl);
10333 else
10334 decl = *n;
10336 /* If '#pragma omp critical' is inside offloaded region or
10337 inside function marked as offloadable, the symbol must be
10338 marked as offloadable too. */
10339 omp_context *octx;
10340 if (cgraph_node::get (current_function_decl)->offloadable)
10341 varpool_node::get_create (decl)->offloadable = 1;
10342 else
10343 for (octx = ctx->outer; octx; octx = octx->outer)
10344 if (is_gimple_omp_offloaded (octx->stmt))
10346 varpool_node::get_create (decl)->offloadable = 1;
10347 break;
10350 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10351 lock = build_call_expr_loc (loc, lock, 1,
10352 build_fold_addr_expr_loc (loc, decl));
10354 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10355 unlock = build_call_expr_loc (loc, unlock, 1,
10356 build_fold_addr_expr_loc (loc, decl));
10358 else
10360 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10361 lock = build_call_expr_loc (loc, lock, 0);
10363 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10364 unlock = build_call_expr_loc (loc, unlock, 0);
10367 push_gimplify_context ();
10369 block = make_node (BLOCK);
10370 bind = gimple_build_bind (NULL, NULL, block);
10371 gsi_replace (gsi_p, bind, true);
10372 gimple_bind_add_stmt (bind, stmt);
10374 tbody = gimple_bind_body (bind);
10375 gimplify_and_add (lock, &tbody);
10376 gimple_bind_set_body (bind, tbody);
10378 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10379 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10380 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10381 gimple_omp_set_body (stmt, NULL);
10383 tbody = gimple_bind_body (bind);
10384 gimplify_and_add (unlock, &tbody);
10385 gimple_bind_set_body (bind, tbody);
10387 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10389 pop_gimplify_context (bind);
10390 gimple_bind_append_vars (bind, ctx->block_vars);
10391 BLOCK_VARS (block) = gimple_bind_vars (bind);
10394 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10395 for a lastprivate clause. Given a loop control predicate of (V
10396 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10397 is appended to *DLIST, iterator initialization is appended to
10398 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10399 to be emitted in a critical section. */
10401 static void
10402 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10403 gimple_seq *dlist, gimple_seq *clist,
10404 struct omp_context *ctx)
10406 tree clauses, cond, vinit;
10407 enum tree_code cond_code;
10408 gimple_seq stmts;
10410 cond_code = fd->loop.cond_code;
10411 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10413 /* When possible, use a strict equality expression. This can let VRP
10414 type optimizations deduce the value and remove a copy. */
10415 if (tree_fits_shwi_p (fd->loop.step))
10417 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10418 if (step == 1 || step == -1)
10419 cond_code = EQ_EXPR;
10422 tree n2 = fd->loop.n2;
10423 if (fd->collapse > 1
10424 && TREE_CODE (n2) != INTEGER_CST
10425 && gimple_omp_for_combined_into_p (fd->for_stmt))
10427 struct omp_context *taskreg_ctx = NULL;
10428 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10430 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10431 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10432 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10434 if (gimple_omp_for_combined_into_p (gfor))
10436 gcc_assert (ctx->outer->outer
10437 && is_parallel_ctx (ctx->outer->outer));
10438 taskreg_ctx = ctx->outer->outer;
10440 else
10442 struct omp_for_data outer_fd;
10443 omp_extract_for_data (gfor, &outer_fd, NULL);
10444 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10447 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10448 taskreg_ctx = ctx->outer->outer;
10450 else if (is_taskreg_ctx (ctx->outer))
10451 taskreg_ctx = ctx->outer;
10452 if (taskreg_ctx)
10454 int i;
10455 tree taskreg_clauses
10456 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10457 tree innerc = omp_find_clause (taskreg_clauses,
10458 OMP_CLAUSE__LOOPTEMP_);
10459 gcc_assert (innerc);
10460 int count = fd->collapse;
10461 if (fd->non_rect
10462 && fd->last_nonrect == fd->first_nonrect + 1)
10463 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10464 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10465 count += 4;
10466 for (i = 0; i < count; i++)
10468 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10469 OMP_CLAUSE__LOOPTEMP_);
10470 gcc_assert (innerc);
10472 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10473 OMP_CLAUSE__LOOPTEMP_);
10474 if (innerc)
10475 n2 = fold_convert (TREE_TYPE (n2),
10476 lookup_decl (OMP_CLAUSE_DECL (innerc),
10477 taskreg_ctx));
10480 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10482 clauses = gimple_omp_for_clauses (fd->for_stmt);
10483 stmts = NULL;
10484 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
10485 if (!gimple_seq_empty_p (stmts))
10487 gimple_seq_add_seq (&stmts, *dlist);
10488 *dlist = stmts;
10490 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10491 vinit = fd->loop.n1;
10492 if (cond_code == EQ_EXPR
10493 && tree_fits_shwi_p (fd->loop.n2)
10494 && ! integer_zerop (fd->loop.n2))
10495 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10496 else
10497 vinit = unshare_expr (vinit);
10499 /* Initialize the iterator variable, so that threads that don't execute
10500 any iterations don't execute the lastprivate clauses by accident. */
10501 gimplify_assign (fd->loop.v, vinit, body_p);
10505 /* OpenACC privatization.
10507 Or, in other words, *sharing* at the respective OpenACC level of
10508 parallelism.
10510 From a correctness perspective, a non-addressable variable can't be accessed
10511 outside the current thread, so it can go in a (faster than shared memory)
10512 register -- though that register may need to be broadcast in some
10513 circumstances. A variable can only meaningfully be "shared" across workers
10514 or vector lanes if its address is taken, e.g. by a call to an atomic
10515 builtin.
10517 From an optimisation perspective, the answer might be fuzzier: maybe
10518 sometimes, using shared memory directly would be faster than
10519 broadcasting. */
10521 static void
10522 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10523 const location_t loc, const tree c,
10524 const tree decl)
10526 const dump_user_location_t d_u_loc
10527 = dump_user_location_t::from_location_t (loc);
10528 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10529 #if __GNUC__ >= 10
10530 # pragma GCC diagnostic push
10531 # pragma GCC diagnostic ignored "-Wformat"
10532 #endif
10533 dump_printf_loc (l_dump_flags, d_u_loc,
10534 "variable %<%T%> ", decl);
10535 #if __GNUC__ >= 10
10536 # pragma GCC diagnostic pop
10537 #endif
10538 if (c)
10539 dump_printf (l_dump_flags,
10540 "in %qs clause ",
10541 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10542 else
10543 dump_printf (l_dump_flags,
10544 "declared in block ");
10547 static bool
10548 oacc_privatization_candidate_p (const location_t loc, const tree c,
10549 const tree decl)
10551 dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10553 /* There is some differentiation depending on block vs. clause. */
10554 bool block = !c;
10556 bool res = true;
10558 if (res && !VAR_P (decl))
10560 /* A PARM_DECL (appearing in a 'private' clause) is expected to have been
10561 privatized into a new VAR_DECL. */
10562 gcc_checking_assert (TREE_CODE (decl) != PARM_DECL);
10564 res = false;
10566 if (dump_enabled_p ())
10568 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10569 dump_printf (l_dump_flags,
10570 "potentially has improper OpenACC privatization level: %qs\n",
10571 get_tree_code_name (TREE_CODE (decl)));
10575 if (res && block && TREE_STATIC (decl))
10577 res = false;
10579 if (dump_enabled_p ())
10581 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10582 dump_printf (l_dump_flags,
10583 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10584 "static");
10588 if (res && block && DECL_EXTERNAL (decl))
10590 res = false;
10592 if (dump_enabled_p ())
10594 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10595 dump_printf (l_dump_flags,
10596 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10597 "external");
10601 if (res && !TREE_ADDRESSABLE (decl))
10603 res = false;
10605 if (dump_enabled_p ())
10607 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10608 dump_printf (l_dump_flags,
10609 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10610 "not addressable");
10614 /* If an artificial variable has been added to a bind, e.g.
10615 a compiler-generated temporary structure used by the Fortran front-end, do
10616 not consider it as a privatization candidate. Note that variables on
10617 the stack are private per-thread by default: making them "gang-private"
10618 for OpenACC actually means to share a single instance of a variable
10619 amongst all workers and threads spawned within each gang.
10620 At present, no compiler-generated artificial variables require such
10621 sharing semantics, so this is safe. */
10623 if (res && block && DECL_ARTIFICIAL (decl))
10625 res = false;
10627 if (dump_enabled_p ())
10629 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10630 dump_printf (l_dump_flags,
10631 "isn%'t candidate for adjusting OpenACC privatization "
10632 "level: %s\n", "artificial");
10636 if (res)
10638 if (dump_enabled_p ())
10640 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10641 dump_printf (l_dump_flags,
10642 "is candidate for adjusting OpenACC privatization level\n");
10646 if (dump_file && (dump_flags & TDF_DETAILS))
10648 print_generic_decl (dump_file, decl, dump_flags);
10649 fprintf (dump_file, "\n");
10652 return res;
10655 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10656 CTX. */
10658 static void
10659 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10661 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10662 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10664 tree decl = OMP_CLAUSE_DECL (c);
10666 tree new_decl = lookup_decl (decl, ctx);
10668 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c,
10669 new_decl))
10670 continue;
10672 gcc_checking_assert
10673 (!ctx->oacc_privatization_candidates.contains (new_decl));
10674 ctx->oacc_privatization_candidates.safe_push (new_decl);
10678 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10679 CTX. */
10681 static void
10682 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10684 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10686 tree new_decl = lookup_decl (decl, ctx);
10687 gcc_checking_assert (new_decl == decl);
10689 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL,
10690 new_decl))
10691 continue;
10693 gcc_checking_assert
10694 (!ctx->oacc_privatization_candidates.contains (new_decl));
10695 ctx->oacc_privatization_candidates.safe_push (new_decl);
10699 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10701 static tree
10702 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10703 struct walk_stmt_info *wi)
10705 gimple *stmt = gsi_stmt (*gsi_p);
10707 *handled_ops_p = true;
10708 switch (gimple_code (stmt))
10710 WALK_SUBSTMTS;
10712 case GIMPLE_OMP_FOR:
10713 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10714 && gimple_omp_for_combined_into_p (stmt))
10715 *handled_ops_p = false;
10716 break;
10718 case GIMPLE_OMP_SCAN:
10719 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10720 return integer_zero_node;
10721 default:
10722 break;
10724 return NULL;
10727 /* Helper function for lower_omp_for, add transformations for a worksharing
10728 loop with scan directives inside of it.
10729 For worksharing loop not combined with simd, transform:
10730 #pragma omp for reduction(inscan,+:r) private(i)
10731 for (i = 0; i < n; i = i + 1)
10734 update (r);
10736 #pragma omp scan inclusive(r)
10738 use (r);
10742 into two worksharing loops + code to merge results:
10744 num_threads = omp_get_num_threads ();
10745 thread_num = omp_get_thread_num ();
10746 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10747 <D.2099>:
10748 var2 = r;
10749 goto <D.2101>;
10750 <D.2100>:
10751 // For UDRs this is UDR init, or if ctors are needed, copy from
10752 // var3 that has been constructed to contain the neutral element.
10753 var2 = 0;
10754 <D.2101>:
10755 ivar = 0;
10756 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10757 // a shared array with num_threads elements and rprivb to a local array
10758 // number of elements equal to the number of (contiguous) iterations the
10759 // current thread will perform. controlb and controlp variables are
10760 // temporaries to handle deallocation of rprivb at the end of second
10761 // GOMP_FOR.
10762 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10763 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10764 for (i = 0; i < n; i = i + 1)
10767 // For UDRs this is UDR init or copy from var3.
10768 r = 0;
10769 // This is the input phase from user code.
10770 update (r);
10773 // For UDRs this is UDR merge.
10774 var2 = var2 + r;
10775 // Rather than handing it over to the user, save to local thread's
10776 // array.
10777 rprivb[ivar] = var2;
10778 // For exclusive scan, the above two statements are swapped.
10779 ivar = ivar + 1;
10782 // And remember the final value from this thread's into the shared
10783 // rpriva array.
10784 rpriva[(sizetype) thread_num] = var2;
10785 // If more than one thread, compute using Work-Efficient prefix sum
10786 // the inclusive parallel scan of the rpriva array.
10787 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10788 <D.2102>:
10789 GOMP_barrier ();
10790 down = 0;
10791 k = 1;
10792 num_threadsu = (unsigned int) num_threads;
10793 thread_numup1 = (unsigned int) thread_num + 1;
10794 <D.2108>:
10795 twok = k << 1;
10796 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10797 <D.2110>:
10798 down = 4294967295;
10799 k = k >> 1;
10800 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10801 <D.2112>:
10802 k = k >> 1;
10803 <D.2111>:
10804 twok = k << 1;
10805 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10806 mul = REALPART_EXPR <cplx>;
10807 ovf = IMAGPART_EXPR <cplx>;
10808 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10809 <D.2116>:
10810 andv = k & down;
10811 andvm1 = andv + 4294967295;
10812 l = mul + andvm1;
10813 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10814 <D.2120>:
10815 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10816 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10817 rpriva[l] = rpriva[l - k] + rpriva[l];
10818 <D.2117>:
10819 if (down == 0) goto <D.2121>; else goto <D.2122>;
10820 <D.2121>:
10821 k = k << 1;
10822 goto <D.2123>;
10823 <D.2122>:
10824 k = k >> 1;
10825 <D.2123>:
10826 GOMP_barrier ();
10827 if (k != 0) goto <D.2108>; else goto <D.2103>;
10828 <D.2103>:
10829 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10830 <D.2124>:
10831 // For UDRs this is UDR init or copy from var3.
10832 var2 = 0;
10833 goto <D.2126>;
10834 <D.2125>:
10835 var2 = rpriva[thread_num - 1];
10836 <D.2126>:
10837 ivar = 0;
10838 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10839 reduction(inscan,+:r) private(i)
10840 for (i = 0; i < n; i = i + 1)
10843 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10844 r = var2 + rprivb[ivar];
10847 // This is the scan phase from user code.
10848 use (r);
10849 // Plus a bump of the iterator.
10850 ivar = ivar + 1;
10852 } */
10854 static void
10855 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10856 struct omp_for_data *fd, omp_context *ctx)
10858 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10859 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10861 gimple_seq body = gimple_omp_body (stmt);
10862 gimple_stmt_iterator input1_gsi = gsi_none ();
10863 struct walk_stmt_info wi;
10864 memset (&wi, 0, sizeof (wi));
10865 wi.val_only = true;
10866 wi.info = (void *) &input1_gsi;
10867 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10868 gcc_assert (!gsi_end_p (input1_gsi));
10870 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10871 gimple_stmt_iterator gsi = input1_gsi;
10872 gsi_next (&gsi);
10873 gimple_stmt_iterator scan1_gsi = gsi;
10874 gimple *scan_stmt1 = gsi_stmt (gsi);
10875 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10877 gimple_seq input_body = gimple_omp_body (input_stmt1);
10878 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10879 gimple_omp_set_body (input_stmt1, NULL);
10880 gimple_omp_set_body (scan_stmt1, NULL);
10881 gimple_omp_set_body (stmt, NULL);
10883 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10884 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10885 gimple_omp_set_body (stmt, body);
10886 gimple_omp_set_body (input_stmt1, input_body);
10888 gimple_stmt_iterator input2_gsi = gsi_none ();
10889 memset (&wi, 0, sizeof (wi));
10890 wi.val_only = true;
10891 wi.info = (void *) &input2_gsi;
10892 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10893 gcc_assert (!gsi_end_p (input2_gsi));
10895 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10896 gsi = input2_gsi;
10897 gsi_next (&gsi);
10898 gimple_stmt_iterator scan2_gsi = gsi;
10899 gimple *scan_stmt2 = gsi_stmt (gsi);
10900 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10901 gimple_omp_set_body (scan_stmt2, scan_body);
10903 gimple_stmt_iterator input3_gsi = gsi_none ();
10904 gimple_stmt_iterator scan3_gsi = gsi_none ();
10905 gimple_stmt_iterator input4_gsi = gsi_none ();
10906 gimple_stmt_iterator scan4_gsi = gsi_none ();
10907 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10908 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10909 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10910 if (is_for_simd)
10912 memset (&wi, 0, sizeof (wi));
10913 wi.val_only = true;
10914 wi.info = (void *) &input3_gsi;
10915 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10916 gcc_assert (!gsi_end_p (input3_gsi));
10918 input_stmt3 = gsi_stmt (input3_gsi);
10919 gsi = input3_gsi;
10920 gsi_next (&gsi);
10921 scan3_gsi = gsi;
10922 scan_stmt3 = gsi_stmt (gsi);
10923 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10925 memset (&wi, 0, sizeof (wi));
10926 wi.val_only = true;
10927 wi.info = (void *) &input4_gsi;
10928 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10929 gcc_assert (!gsi_end_p (input4_gsi));
10931 input_stmt4 = gsi_stmt (input4_gsi);
10932 gsi = input4_gsi;
10933 gsi_next (&gsi);
10934 scan4_gsi = gsi;
10935 scan_stmt4 = gsi_stmt (gsi);
10936 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10938 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10939 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
10942 tree num_threads = create_tmp_var (integer_type_node);
10943 tree thread_num = create_tmp_var (integer_type_node);
10944 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10945 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10946 gimple *g = gimple_build_call (nthreads_decl, 0);
10947 gimple_call_set_lhs (g, num_threads);
10948 gimple_seq_add_stmt (body_p, g);
10949 g = gimple_build_call (threadnum_decl, 0);
10950 gimple_call_set_lhs (g, thread_num);
10951 gimple_seq_add_stmt (body_p, g);
10953 tree ivar = create_tmp_var (sizetype);
10954 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
10955 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
10956 tree k = create_tmp_var (unsigned_type_node);
10957 tree l = create_tmp_var (unsigned_type_node);
10959 gimple_seq clist = NULL, mdlist = NULL;
10960 gimple_seq thr01_list = NULL, thrn1_list = NULL;
10961 gimple_seq thr02_list = NULL, thrn2_list = NULL;
10962 gimple_seq scan1_list = NULL, input2_list = NULL;
10963 gimple_seq last_list = NULL, reduc_list = NULL;
10964 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10965 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10966 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10968 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10969 tree var = OMP_CLAUSE_DECL (c);
10970 tree new_var = lookup_decl (var, ctx);
10971 tree var3 = NULL_TREE;
10972 tree new_vard = new_var;
10973 if (omp_privatize_by_reference (var))
10974 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10975 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10977 var3 = maybe_lookup_decl (new_vard, ctx);
10978 if (var3 == new_vard)
10979 var3 = NULL_TREE;
10982 tree ptype = build_pointer_type (TREE_TYPE (new_var));
10983 tree rpriva = create_tmp_var (ptype);
10984 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10985 OMP_CLAUSE_DECL (nc) = rpriva;
10986 *cp1 = nc;
10987 cp1 = &OMP_CLAUSE_CHAIN (nc);
10989 tree rprivb = create_tmp_var (ptype);
10990 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10991 OMP_CLAUSE_DECL (nc) = rprivb;
10992 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
10993 *cp1 = nc;
10994 cp1 = &OMP_CLAUSE_CHAIN (nc);
10996 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
10997 if (new_vard != new_var)
10998 TREE_ADDRESSABLE (var2) = 1;
10999 gimple_add_tmp_var (var2);
11001 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
11002 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11003 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11004 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11005 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
11007 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
11008 thread_num, integer_minus_one_node);
11009 x = fold_convert_loc (clause_loc, sizetype, x);
11010 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11011 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11012 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11013 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
11015 x = fold_convert_loc (clause_loc, sizetype, l);
11016 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11017 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11018 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11019 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
11021 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
11022 x = fold_convert_loc (clause_loc, sizetype, x);
11023 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11024 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11025 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11026 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
11028 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
11029 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11030 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
11031 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
11033 tree var4 = is_for_simd ? new_var : var2;
11034 tree var5 = NULL_TREE, var6 = NULL_TREE;
11035 if (is_for_simd)
11037 var5 = lookup_decl (var, input_simd_ctx);
11038 var6 = lookup_decl (var, scan_simd_ctx);
11039 if (new_vard != new_var)
11041 var5 = build_simple_mem_ref_loc (clause_loc, var5);
11042 var6 = build_simple_mem_ref_loc (clause_loc, var6);
11045 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11047 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
11048 tree val = var2;
11050 x = lang_hooks.decls.omp_clause_default_ctor
11051 (c, var2, build_outer_var_ref (var, ctx));
11052 if (x)
11053 gimplify_and_add (x, &clist);
11055 x = build_outer_var_ref (var, ctx);
11056 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
11058 gimplify_and_add (x, &thr01_list);
11060 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
11061 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
11062 if (var3)
11064 x = unshare_expr (var4);
11065 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11066 gimplify_and_add (x, &thrn1_list);
11067 x = unshare_expr (var4);
11068 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11069 gimplify_and_add (x, &thr02_list);
11071 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
11073 /* Otherwise, assign to it the identity element. */
11074 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11075 tseq = copy_gimple_seq_and_replace_locals (tseq);
11076 if (!is_for_simd)
11078 if (new_vard != new_var)
11079 val = build_fold_addr_expr_loc (clause_loc, val);
11080 SET_DECL_VALUE_EXPR (new_vard, val);
11081 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11083 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
11084 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11085 lower_omp (&tseq, ctx);
11086 gimple_seq_add_seq (&thrn1_list, tseq);
11087 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11088 lower_omp (&tseq, ctx);
11089 gimple_seq_add_seq (&thr02_list, tseq);
11090 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11091 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11092 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11093 if (y)
11094 SET_DECL_VALUE_EXPR (new_vard, y);
11095 else
11097 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11098 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11102 x = unshare_expr (var4);
11103 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
11104 gimplify_and_add (x, &thrn2_list);
11106 if (is_for_simd)
11108 x = unshare_expr (rprivb_ref);
11109 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
11110 gimplify_and_add (x, &scan1_list);
11112 else
11114 if (ctx->scan_exclusive)
11116 x = unshare_expr (rprivb_ref);
11117 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11118 gimplify_and_add (x, &scan1_list);
11121 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11122 tseq = copy_gimple_seq_and_replace_locals (tseq);
11123 SET_DECL_VALUE_EXPR (placeholder, var2);
11124 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11125 lower_omp (&tseq, ctx);
11126 gimple_seq_add_seq (&scan1_list, tseq);
11128 if (ctx->scan_inclusive)
11130 x = unshare_expr (rprivb_ref);
11131 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11132 gimplify_and_add (x, &scan1_list);
11136 x = unshare_expr (rpriva_ref);
11137 x = lang_hooks.decls.omp_clause_assign_op (c, x,
11138 unshare_expr (var4));
11139 gimplify_and_add (x, &mdlist);
11141 x = unshare_expr (is_for_simd ? var6 : new_var);
11142 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
11143 gimplify_and_add (x, &input2_list);
11145 val = rprivb_ref;
11146 if (new_vard != new_var)
11147 val = build_fold_addr_expr_loc (clause_loc, val);
11149 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11150 tseq = copy_gimple_seq_and_replace_locals (tseq);
11151 SET_DECL_VALUE_EXPR (new_vard, val);
11152 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11153 if (is_for_simd)
11155 SET_DECL_VALUE_EXPR (placeholder, var6);
11156 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11158 else
11159 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11160 lower_omp (&tseq, ctx);
11161 if (y)
11162 SET_DECL_VALUE_EXPR (new_vard, y);
11163 else
11165 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11166 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11168 if (!is_for_simd)
11170 SET_DECL_VALUE_EXPR (placeholder, new_var);
11171 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11172 lower_omp (&tseq, ctx);
11174 gimple_seq_add_seq (&input2_list, tseq);
11176 x = build_outer_var_ref (var, ctx);
11177 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
11178 gimplify_and_add (x, &last_list);
11180 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
11181 gimplify_and_add (x, &reduc_list);
11182 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11183 tseq = copy_gimple_seq_and_replace_locals (tseq);
11184 val = rprival_ref;
11185 if (new_vard != new_var)
11186 val = build_fold_addr_expr_loc (clause_loc, val);
11187 SET_DECL_VALUE_EXPR (new_vard, val);
11188 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11189 SET_DECL_VALUE_EXPR (placeholder, var2);
11190 lower_omp (&tseq, ctx);
11191 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11192 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11193 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11194 if (y)
11195 SET_DECL_VALUE_EXPR (new_vard, y);
11196 else
11198 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11199 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11201 gimple_seq_add_seq (&reduc_list, tseq);
11202 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
11203 gimplify_and_add (x, &reduc_list);
11205 x = lang_hooks.decls.omp_clause_dtor (c, var2);
11206 if (x)
11207 gimplify_and_add (x, dlist);
11209 else
11211 x = build_outer_var_ref (var, ctx);
11212 gimplify_assign (unshare_expr (var4), x, &thr01_list);
11214 x = omp_reduction_init (c, TREE_TYPE (new_var));
11215 gimplify_assign (unshare_expr (var4), unshare_expr (x),
11216 &thrn1_list);
11217 gimplify_assign (unshare_expr (var4), x, &thr02_list);
11219 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
11221 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
11222 if (code == MINUS_EXPR)
11223 code = PLUS_EXPR;
11225 if (is_for_simd)
11226 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
11227 else
11229 if (ctx->scan_exclusive)
11230 gimplify_assign (unshare_expr (rprivb_ref), var2,
11231 &scan1_list);
11232 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
11233 gimplify_assign (var2, x, &scan1_list);
11234 if (ctx->scan_inclusive)
11235 gimplify_assign (unshare_expr (rprivb_ref), var2,
11236 &scan1_list);
11239 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
11240 &mdlist);
11242 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
11243 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
11245 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
11246 &last_list);
11248 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
11249 unshare_expr (rprival_ref));
11250 gimplify_assign (rprival_ref, x, &reduc_list);
11254 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11255 gimple_seq_add_stmt (&scan1_list, g);
11256 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11257 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11258 ? scan_stmt4 : scan_stmt2), g);
11260 tree controlb = create_tmp_var (boolean_type_node);
11261 tree controlp = create_tmp_var (ptr_type_node);
11262 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11263 OMP_CLAUSE_DECL (nc) = controlb;
11264 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11265 *cp1 = nc;
11266 cp1 = &OMP_CLAUSE_CHAIN (nc);
11267 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11268 OMP_CLAUSE_DECL (nc) = controlp;
11269 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11270 *cp1 = nc;
11271 cp1 = &OMP_CLAUSE_CHAIN (nc);
11272 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11273 OMP_CLAUSE_DECL (nc) = controlb;
11274 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11275 *cp2 = nc;
11276 cp2 = &OMP_CLAUSE_CHAIN (nc);
11277 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11278 OMP_CLAUSE_DECL (nc) = controlp;
11279 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11280 *cp2 = nc;
11281 cp2 = &OMP_CLAUSE_CHAIN (nc);
11283 *cp1 = gimple_omp_for_clauses (stmt);
11284 gimple_omp_for_set_clauses (stmt, new_clauses1);
11285 *cp2 = gimple_omp_for_clauses (new_stmt);
11286 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
11288 if (is_for_simd)
11290 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
11291 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
11293 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
11294 GSI_SAME_STMT);
11295 gsi_remove (&input3_gsi, true);
11296 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11297 GSI_SAME_STMT);
11298 gsi_remove (&scan3_gsi, true);
11299 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11300 GSI_SAME_STMT);
11301 gsi_remove (&input4_gsi, true);
11302 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11303 GSI_SAME_STMT);
11304 gsi_remove (&scan4_gsi, true);
11306 else
11308 gimple_omp_set_body (scan_stmt1, scan1_list);
11309 gimple_omp_set_body (input_stmt2, input2_list);
11312 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11313 GSI_SAME_STMT);
11314 gsi_remove (&input1_gsi, true);
11315 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11316 GSI_SAME_STMT);
11317 gsi_remove (&scan1_gsi, true);
11318 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11319 GSI_SAME_STMT);
11320 gsi_remove (&input2_gsi, true);
11321 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11322 GSI_SAME_STMT);
11323 gsi_remove (&scan2_gsi, true);
11325 gimple_seq_add_seq (body_p, clist);
11327 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11328 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11329 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11330 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11331 gimple_seq_add_stmt (body_p, g);
11332 g = gimple_build_label (lab1);
11333 gimple_seq_add_stmt (body_p, g);
11334 gimple_seq_add_seq (body_p, thr01_list);
11335 g = gimple_build_goto (lab3);
11336 gimple_seq_add_stmt (body_p, g);
11337 g = gimple_build_label (lab2);
11338 gimple_seq_add_stmt (body_p, g);
11339 gimple_seq_add_seq (body_p, thrn1_list);
11340 g = gimple_build_label (lab3);
11341 gimple_seq_add_stmt (body_p, g);
11343 g = gimple_build_assign (ivar, size_zero_node);
11344 gimple_seq_add_stmt (body_p, g);
11346 gimple_seq_add_stmt (body_p, stmt);
11347 gimple_seq_add_seq (body_p, body);
11348 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11349 fd->loop.v));
11351 g = gimple_build_omp_return (true);
11352 gimple_seq_add_stmt (body_p, g);
11353 gimple_seq_add_seq (body_p, mdlist);
11355 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11356 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11357 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11358 gimple_seq_add_stmt (body_p, g);
11359 g = gimple_build_label (lab1);
11360 gimple_seq_add_stmt (body_p, g);
11362 g = omp_build_barrier (NULL);
11363 gimple_seq_add_stmt (body_p, g);
11365 tree down = create_tmp_var (unsigned_type_node);
11366 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11367 gimple_seq_add_stmt (body_p, g);
11369 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11370 gimple_seq_add_stmt (body_p, g);
11372 tree num_threadsu = create_tmp_var (unsigned_type_node);
11373 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11374 gimple_seq_add_stmt (body_p, g);
11376 tree thread_numu = create_tmp_var (unsigned_type_node);
11377 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11378 gimple_seq_add_stmt (body_p, g);
11380 tree thread_nump1 = create_tmp_var (unsigned_type_node);
11381 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11382 build_int_cst (unsigned_type_node, 1));
11383 gimple_seq_add_stmt (body_p, g);
11385 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11386 g = gimple_build_label (lab3);
11387 gimple_seq_add_stmt (body_p, g);
11389 tree twok = create_tmp_var (unsigned_type_node);
11390 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11391 gimple_seq_add_stmt (body_p, g);
11393 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11394 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11395 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11396 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11397 gimple_seq_add_stmt (body_p, g);
11398 g = gimple_build_label (lab4);
11399 gimple_seq_add_stmt (body_p, g);
11400 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11401 gimple_seq_add_stmt (body_p, g);
11402 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11403 gimple_seq_add_stmt (body_p, g);
11405 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11406 gimple_seq_add_stmt (body_p, g);
11407 g = gimple_build_label (lab6);
11408 gimple_seq_add_stmt (body_p, g);
11410 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11411 gimple_seq_add_stmt (body_p, g);
11413 g = gimple_build_label (lab5);
11414 gimple_seq_add_stmt (body_p, g);
11416 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11417 gimple_seq_add_stmt (body_p, g);
11419 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11420 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11421 gimple_call_set_lhs (g, cplx);
11422 gimple_seq_add_stmt (body_p, g);
11423 tree mul = create_tmp_var (unsigned_type_node);
11424 g = gimple_build_assign (mul, REALPART_EXPR,
11425 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11426 gimple_seq_add_stmt (body_p, g);
11427 tree ovf = create_tmp_var (unsigned_type_node);
11428 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11429 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11430 gimple_seq_add_stmt (body_p, g);
11432 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11433 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11434 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11435 lab7, lab8);
11436 gimple_seq_add_stmt (body_p, g);
11437 g = gimple_build_label (lab7);
11438 gimple_seq_add_stmt (body_p, g);
11440 tree andv = create_tmp_var (unsigned_type_node);
11441 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11442 gimple_seq_add_stmt (body_p, g);
11443 tree andvm1 = create_tmp_var (unsigned_type_node);
11444 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11445 build_minus_one_cst (unsigned_type_node));
11446 gimple_seq_add_stmt (body_p, g);
11448 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11449 gimple_seq_add_stmt (body_p, g);
11451 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11452 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11453 gimple_seq_add_stmt (body_p, g);
11454 g = gimple_build_label (lab9);
11455 gimple_seq_add_stmt (body_p, g);
11456 gimple_seq_add_seq (body_p, reduc_list);
11457 g = gimple_build_label (lab8);
11458 gimple_seq_add_stmt (body_p, g);
11460 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11461 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11462 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11463 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11464 lab10, lab11);
11465 gimple_seq_add_stmt (body_p, g);
11466 g = gimple_build_label (lab10);
11467 gimple_seq_add_stmt (body_p, g);
11468 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11469 gimple_seq_add_stmt (body_p, g);
11470 g = gimple_build_goto (lab12);
11471 gimple_seq_add_stmt (body_p, g);
11472 g = gimple_build_label (lab11);
11473 gimple_seq_add_stmt (body_p, g);
11474 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11475 gimple_seq_add_stmt (body_p, g);
11476 g = gimple_build_label (lab12);
11477 gimple_seq_add_stmt (body_p, g);
11479 g = omp_build_barrier (NULL);
11480 gimple_seq_add_stmt (body_p, g);
11482 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11483 lab3, lab2);
11484 gimple_seq_add_stmt (body_p, g);
11486 g = gimple_build_label (lab2);
11487 gimple_seq_add_stmt (body_p, g);
11489 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11490 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11491 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11492 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11493 gimple_seq_add_stmt (body_p, g);
11494 g = gimple_build_label (lab1);
11495 gimple_seq_add_stmt (body_p, g);
11496 gimple_seq_add_seq (body_p, thr02_list);
11497 g = gimple_build_goto (lab3);
11498 gimple_seq_add_stmt (body_p, g);
11499 g = gimple_build_label (lab2);
11500 gimple_seq_add_stmt (body_p, g);
11501 gimple_seq_add_seq (body_p, thrn2_list);
11502 g = gimple_build_label (lab3);
11503 gimple_seq_add_stmt (body_p, g);
11505 g = gimple_build_assign (ivar, size_zero_node);
11506 gimple_seq_add_stmt (body_p, g);
11507 gimple_seq_add_stmt (body_p, new_stmt);
11508 gimple_seq_add_seq (body_p, new_body);
11510 gimple_seq new_dlist = NULL;
11511 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11512 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11513 tree num_threadsm1 = create_tmp_var (integer_type_node);
11514 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11515 integer_minus_one_node);
11516 gimple_seq_add_stmt (&new_dlist, g);
11517 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11518 gimple_seq_add_stmt (&new_dlist, g);
11519 g = gimple_build_label (lab1);
11520 gimple_seq_add_stmt (&new_dlist, g);
11521 gimple_seq_add_seq (&new_dlist, last_list);
11522 g = gimple_build_label (lab2);
11523 gimple_seq_add_stmt (&new_dlist, g);
11524 gimple_seq_add_seq (&new_dlist, *dlist);
11525 *dlist = new_dlist;
11528 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11529 the addresses of variables to be made private at the surrounding
11530 parallelism level. Such functions appear in the gimple code stream in two
11531 forms, e.g. for a partitioned loop:
11533 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11534 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11535 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11536 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11538 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11539 not as part of a HEAD_MARK sequence:
11541 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11543 For such stand-alone appearances, the 3rd argument is always 0, denoting
11544 gang partitioning. */
11546 static gcall *
11547 lower_oacc_private_marker (omp_context *ctx)
11549 if (ctx->oacc_privatization_candidates.length () == 0)
11550 return NULL;
11552 auto_vec<tree, 5> args;
11554 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11555 args.quick_push (integer_zero_node);
11556 args.quick_push (integer_minus_one_node);
11558 int i;
11559 tree decl;
11560 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11562 gcc_checking_assert (TREE_ADDRESSABLE (decl));
11563 tree addr = build_fold_addr_expr (decl);
11564 args.safe_push (addr);
11567 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
11570 /* Lower code for an OMP loop directive. */
11572 static void
11573 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11575 tree *rhs_p, block;
11576 struct omp_for_data fd, *fdp = NULL;
11577 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
11578 gbind *new_stmt;
11579 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
11580 gimple_seq cnt_list = NULL, clist = NULL;
11581 gimple_seq oacc_head = NULL, oacc_tail = NULL;
11582 size_t i;
11584 push_gimplify_context ();
11586 if (is_gimple_omp_oacc (ctx->stmt))
11587 oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));
11589 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
11591 block = make_node (BLOCK);
11592 new_stmt = gimple_build_bind (NULL, NULL, block);
11593 /* Replace at gsi right away, so that 'stmt' is no member
11594 of a sequence anymore as we're going to add to a different
11595 one below. */
11596 gsi_replace (gsi_p, new_stmt, true);
11598 /* Move declaration of temporaries in the loop body before we make
11599 it go away. */
11600 omp_for_body = gimple_omp_body (stmt);
11601 if (!gimple_seq_empty_p (omp_for_body)
11602 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
11604 gbind *inner_bind
11605 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
11606 tree vars = gimple_bind_vars (inner_bind);
11607 if (is_gimple_omp_oacc (ctx->stmt))
11608 oacc_privatization_scan_decl_chain (ctx, vars);
11609 gimple_bind_append_vars (new_stmt, vars);
11610 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11611 keep them on the inner_bind and it's block. */
11612 gimple_bind_set_vars (inner_bind, NULL_TREE);
11613 if (gimple_bind_block (inner_bind))
11614 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
11617 if (gimple_omp_for_combined_into_p (stmt))
11619 omp_extract_for_data (stmt, &fd, NULL);
11620 fdp = &fd;
11622 /* We need two temporaries with fd.loop.v type (istart/iend)
11623 and then (fd.collapse - 1) temporaries with the same
11624 type for count2 ... countN-1 vars if not constant. */
11625 size_t count = 2;
11626 tree type = fd.iter_type;
11627 if (fd.collapse > 1
11628 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11629 count += fd.collapse - 1;
11630 size_t count2 = 0;
11631 tree type2 = NULL_TREE;
11632 bool taskreg_for
11633 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
11634 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
11635 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
11636 tree simtc = NULL;
11637 tree clauses = *pc;
11638 if (fd.collapse > 1
11639 && fd.non_rect
11640 && fd.last_nonrect == fd.first_nonrect + 1
11641 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11642 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
11643 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
11645 v = gimple_omp_for_index (stmt, fd.first_nonrect);
11646 type2 = TREE_TYPE (v);
11647 count++;
11648 count2 = 3;
11650 if (taskreg_for)
11651 outerc
11652 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
11653 OMP_CLAUSE__LOOPTEMP_);
11654 if (ctx->simt_stmt)
11655 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
11656 OMP_CLAUSE__LOOPTEMP_);
11657 for (i = 0; i < count + count2; i++)
11659 tree temp;
11660 if (taskreg_for)
11662 gcc_assert (outerc);
11663 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
11664 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
11665 OMP_CLAUSE__LOOPTEMP_);
11667 else
11669 /* If there are 2 adjacent SIMD stmts, one with _simt_
11670 clause, another without, make sure they have the same
11671 decls in _looptemp_ clauses, because the outer stmt
11672 they are combined into will look up just one inner_stmt. */
11673 if (ctx->simt_stmt)
11674 temp = OMP_CLAUSE_DECL (simtc);
11675 else
11676 temp = create_tmp_var (i >= count ? type2 : type);
11677 insert_decl_map (&ctx->outer->cb, temp, temp);
11679 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
11680 OMP_CLAUSE_DECL (*pc) = temp;
11681 pc = &OMP_CLAUSE_CHAIN (*pc);
11682 if (ctx->simt_stmt)
11683 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
11684 OMP_CLAUSE__LOOPTEMP_);
11686 *pc = clauses;
11689 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11690 dlist = NULL;
11691 body = NULL;
11692 tree rclauses
11693 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
11694 OMP_CLAUSE_REDUCTION);
11695 tree rtmp = NULL_TREE;
11696 if (rclauses)
11698 tree type = build_pointer_type (pointer_sized_int_node);
11699 tree temp = create_tmp_var (type);
11700 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
11701 OMP_CLAUSE_DECL (c) = temp;
11702 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
11703 gimple_omp_for_set_clauses (stmt, c);
11704 lower_omp_task_reductions (ctx, OMP_FOR,
11705 gimple_omp_for_clauses (stmt),
11706 &tred_ilist, &tred_dlist);
11707 rclauses = c;
11708 rtmp = make_ssa_name (type);
11709 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
11712 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
11713 ctx);
11715 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
11716 fdp);
11717 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
11718 gimple_omp_for_pre_body (stmt));
11720 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11722 gcall *private_marker = NULL;
11723 if (is_gimple_omp_oacc (ctx->stmt)
11724 && !gimple_seq_empty_p (omp_for_body))
11725 private_marker = lower_oacc_private_marker (ctx);
11727 /* Lower the header expressions. At this point, we can assume that
11728 the header is of the form:
11730 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11732 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11733 using the .omp_data_s mapping, if needed. */
11734 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
11736 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
11737 if (TREE_CODE (*rhs_p) == TREE_VEC)
11739 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11740 TREE_VEC_ELT (*rhs_p, 1)
11741 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11742 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11743 TREE_VEC_ELT (*rhs_p, 2)
11744 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11746 else if (!is_gimple_min_invariant (*rhs_p))
11747 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11748 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11749 recompute_tree_invariant_for_addr_expr (*rhs_p);
11751 rhs_p = gimple_omp_for_final_ptr (stmt, i);
11752 if (TREE_CODE (*rhs_p) == TREE_VEC)
11754 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11755 TREE_VEC_ELT (*rhs_p, 1)
11756 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11757 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11758 TREE_VEC_ELT (*rhs_p, 2)
11759 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11761 else if (!is_gimple_min_invariant (*rhs_p))
11762 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11763 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11764 recompute_tree_invariant_for_addr_expr (*rhs_p);
11766 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
11767 if (!is_gimple_min_invariant (*rhs_p))
11768 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11770 if (rclauses)
11771 gimple_seq_add_seq (&tred_ilist, cnt_list);
11772 else
11773 gimple_seq_add_seq (&body, cnt_list);
11775 /* Once lowered, extract the bounds and clauses. */
11776 omp_extract_for_data (stmt, &fd, NULL);
11778 if (is_gimple_omp_oacc (ctx->stmt)
11779 && !ctx_in_oacc_kernels_region (ctx))
11780 lower_oacc_head_tail (gimple_location (stmt),
11781 gimple_omp_for_clauses (stmt), private_marker,
11782 &oacc_head, &oacc_tail, ctx);
11784 /* Add OpenACC partitioning and reduction markers just before the loop. */
11785 if (oacc_head)
11786 gimple_seq_add_seq (&body, oacc_head);
11788 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
11790 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11791 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11792 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11793 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11795 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
11796 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
11797 OMP_CLAUSE_LINEAR_STEP (c)
11798 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
11799 ctx);
11802 if ((ctx->scan_inclusive || ctx->scan_exclusive)
11803 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11804 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
11805 else
11807 gimple_seq_add_stmt (&body, stmt);
11808 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
11811 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
11812 fd.loop.v));
11814 /* After the loop, add exit clauses. */
11815 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
11817 if (clist)
11819 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
11820 gcall *g = gimple_build_call (fndecl, 0);
11821 gimple_seq_add_stmt (&body, g);
11822 gimple_seq_add_seq (&body, clist);
11823 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
11824 g = gimple_build_call (fndecl, 0);
11825 gimple_seq_add_stmt (&body, g);
11828 if (ctx->cancellable)
11829 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
11831 gimple_seq_add_seq (&body, dlist);
11833 if (rclauses)
11835 gimple_seq_add_seq (&tred_ilist, body);
11836 body = tred_ilist;
11839 body = maybe_catch_exception (body);
11841 /* Region exit marker goes at the end of the loop body. */
11842 gimple *g = gimple_build_omp_return (fd.have_nowait);
11843 gimple_seq_add_stmt (&body, g);
11845 gimple_seq_add_seq (&body, tred_dlist);
11847 maybe_add_implicit_barrier_cancel (ctx, g, &body);
11849 if (rclauses)
11850 OMP_CLAUSE_DECL (rclauses) = rtmp;
11852 /* Add OpenACC joining and reduction markers just after the loop. */
11853 if (oacc_tail)
11854 gimple_seq_add_seq (&body, oacc_tail);
11856 pop_gimplify_context (new_stmt);
11858 gimple_bind_append_vars (new_stmt, ctx->block_vars);
11859 maybe_remove_omp_member_access_dummy_vars (new_stmt);
11860 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
11861 if (BLOCK_VARS (block))
11862 TREE_USED (block) = 1;
11864 gimple_bind_set_body (new_stmt, body);
11865 gimple_omp_set_body (stmt, NULL);
11866 gimple_omp_for_set_pre_body (stmt, NULL);
11869 /* Callback for walk_stmts. Check if the current statement only contains
11870 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11872 static tree
11873 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11874 bool *handled_ops_p,
11875 struct walk_stmt_info *wi)
11877 int *info = (int *) wi->info;
11878 gimple *stmt = gsi_stmt (*gsi_p);
11880 *handled_ops_p = true;
11881 switch (gimple_code (stmt))
11883 WALK_SUBSTMTS;
11885 case GIMPLE_DEBUG:
11886 break;
11887 case GIMPLE_OMP_FOR:
11888 case GIMPLE_OMP_SECTIONS:
11889 *info = *info == 0 ? 1 : -1;
11890 break;
11891 default:
11892 *info = -1;
11893 break;
11895 return NULL;
11898 struct omp_taskcopy_context
11900 /* This field must be at the beginning, as we do "inheritance": Some
11901 callback functions for tree-inline.cc (e.g., omp_copy_decl)
11902 receive a copy_body_data pointer that is up-casted to an
11903 omp_context pointer. */
11904 copy_body_data cb;
11905 omp_context *ctx;
11908 static tree
11909 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11911 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11913 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11914 return create_tmp_var (TREE_TYPE (var));
11916 return var;
11919 static tree
11920 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11922 tree name, new_fields = NULL, type, f;
11924 type = lang_hooks.types.make_type (RECORD_TYPE);
11925 name = DECL_NAME (TYPE_NAME (orig_type));
11926 name = build_decl (gimple_location (tcctx->ctx->stmt),
11927 TYPE_DECL, name, type);
11928 TYPE_NAME (type) = name;
11930 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11932 tree new_f = copy_node (f);
11933 DECL_CONTEXT (new_f) = type;
11934 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11935 TREE_CHAIN (new_f) = new_fields;
11936 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11937 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11938 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11939 &tcctx->cb, NULL);
11940 new_fields = new_f;
11941 tcctx->cb.decl_map->put (f, new_f);
11943 TYPE_FIELDS (type) = nreverse (new_fields);
11944 layout_type (type);
11945 return type;
11948 /* Create task copyfn. */
11950 static void
11951 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
11953 struct function *child_cfun;
11954 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
11955 tree record_type, srecord_type, bind, list;
11956 bool record_needs_remap = false, srecord_needs_remap = false;
11957 splay_tree_node n;
11958 struct omp_taskcopy_context tcctx;
11959 location_t loc = gimple_location (task_stmt);
11960 size_t looptempno = 0;
11962 child_fn = gimple_omp_task_copy_fn (task_stmt);
11963 task_cpyfns.safe_push (task_stmt);
11964 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
11965 gcc_assert (child_cfun->cfg == NULL);
11966 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
11968 /* Reset DECL_CONTEXT on function arguments. */
11969 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
11970 DECL_CONTEXT (t) = child_fn;
11972 /* Populate the function. */
11973 push_gimplify_context ();
11974 push_cfun (child_cfun);
11976 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
11977 TREE_SIDE_EFFECTS (bind) = 1;
11978 list = NULL;
11979 DECL_SAVED_TREE (child_fn) = bind;
11980 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
11982 /* Remap src and dst argument types if needed. */
11983 record_type = ctx->record_type;
11984 srecord_type = ctx->srecord_type;
11985 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
11986 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11988 record_needs_remap = true;
11989 break;
11991 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
11992 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11994 srecord_needs_remap = true;
11995 break;
11998 if (record_needs_remap || srecord_needs_remap)
12000 memset (&tcctx, '\0', sizeof (tcctx));
12001 tcctx.cb.src_fn = ctx->cb.src_fn;
12002 tcctx.cb.dst_fn = child_fn;
12003 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
12004 gcc_checking_assert (tcctx.cb.src_node);
12005 tcctx.cb.dst_node = tcctx.cb.src_node;
12006 tcctx.cb.src_cfun = ctx->cb.src_cfun;
12007 tcctx.cb.copy_decl = task_copyfn_copy_decl;
12008 tcctx.cb.eh_lp_nr = 0;
12009 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
12010 tcctx.cb.decl_map = new hash_map<tree, tree>;
12011 tcctx.ctx = ctx;
12013 if (record_needs_remap)
12014 record_type = task_copyfn_remap_type (&tcctx, record_type);
12015 if (srecord_needs_remap)
12016 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
12018 else
12019 tcctx.cb.decl_map = NULL;
12021 arg = DECL_ARGUMENTS (child_fn);
12022 TREE_TYPE (arg) = build_pointer_type (record_type);
12023 sarg = DECL_CHAIN (arg);
12024 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
12026 /* First pass: initialize temporaries used in record_type and srecord_type
12027 sizes and field offsets. */
12028 if (tcctx.cb.decl_map)
12029 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12030 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12032 tree *p;
12034 decl = OMP_CLAUSE_DECL (c);
12035 p = tcctx.cb.decl_map->get (decl);
12036 if (p == NULL)
12037 continue;
12038 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12039 sf = (tree) n->value;
12040 sf = *tcctx.cb.decl_map->get (sf);
12041 src = build_simple_mem_ref_loc (loc, sarg);
12042 src = omp_build_component_ref (src, sf);
12043 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
12044 append_to_statement_list (t, &list);
12047 /* Second pass: copy shared var pointers and copy construct non-VLA
12048 firstprivate vars. */
12049 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12050 switch (OMP_CLAUSE_CODE (c))
12052 splay_tree_key key;
12053 case OMP_CLAUSE_SHARED:
12054 decl = OMP_CLAUSE_DECL (c);
12055 key = (splay_tree_key) decl;
12056 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
12057 key = (splay_tree_key) &DECL_UID (decl);
12058 n = splay_tree_lookup (ctx->field_map, key);
12059 if (n == NULL)
12060 break;
12061 f = (tree) n->value;
12062 if (tcctx.cb.decl_map)
12063 f = *tcctx.cb.decl_map->get (f);
12064 n = splay_tree_lookup (ctx->sfield_map, key);
12065 sf = (tree) n->value;
12066 if (tcctx.cb.decl_map)
12067 sf = *tcctx.cb.decl_map->get (sf);
12068 src = build_simple_mem_ref_loc (loc, sarg);
12069 src = omp_build_component_ref (src, sf);
12070 dst = build_simple_mem_ref_loc (loc, arg);
12071 dst = omp_build_component_ref (dst, f);
12072 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12073 append_to_statement_list (t, &list);
12074 break;
12075 case OMP_CLAUSE_REDUCTION:
12076 case OMP_CLAUSE_IN_REDUCTION:
12077 decl = OMP_CLAUSE_DECL (c);
12078 if (TREE_CODE (decl) == MEM_REF)
12080 decl = TREE_OPERAND (decl, 0);
12081 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12082 decl = TREE_OPERAND (decl, 0);
12083 if (TREE_CODE (decl) == INDIRECT_REF
12084 || TREE_CODE (decl) == ADDR_EXPR)
12085 decl = TREE_OPERAND (decl, 0);
12087 key = (splay_tree_key) decl;
12088 n = splay_tree_lookup (ctx->field_map, key);
12089 if (n == NULL)
12090 break;
12091 f = (tree) n->value;
12092 if (tcctx.cb.decl_map)
12093 f = *tcctx.cb.decl_map->get (f);
12094 n = splay_tree_lookup (ctx->sfield_map, key);
12095 sf = (tree) n->value;
12096 if (tcctx.cb.decl_map)
12097 sf = *tcctx.cb.decl_map->get (sf);
12098 src = build_simple_mem_ref_loc (loc, sarg);
12099 src = omp_build_component_ref (src, sf);
12100 if (decl != OMP_CLAUSE_DECL (c)
12101 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12102 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12103 src = build_simple_mem_ref_loc (loc, src);
12104 dst = build_simple_mem_ref_loc (loc, arg);
12105 dst = omp_build_component_ref (dst, f);
12106 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12107 append_to_statement_list (t, &list);
12108 break;
12109 case OMP_CLAUSE__LOOPTEMP_:
12110 /* Fields for first two _looptemp_ clauses are initialized by
12111 GOMP_taskloop*, the rest are handled like firstprivate. */
12112 if (looptempno < 2)
12114 looptempno++;
12115 break;
12117 /* FALLTHRU */
12118 case OMP_CLAUSE__REDUCTEMP_:
12119 case OMP_CLAUSE_FIRSTPRIVATE:
12120 decl = OMP_CLAUSE_DECL (c);
12121 if (is_variable_sized (decl))
12122 break;
12123 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12124 if (n == NULL)
12125 break;
12126 f = (tree) n->value;
12127 if (tcctx.cb.decl_map)
12128 f = *tcctx.cb.decl_map->get (f);
12129 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12130 if (n != NULL)
12132 sf = (tree) n->value;
12133 if (tcctx.cb.decl_map)
12134 sf = *tcctx.cb.decl_map->get (sf);
12135 src = build_simple_mem_ref_loc (loc, sarg);
12136 src = omp_build_component_ref (src, sf);
12137 if (use_pointer_for_field (decl, NULL)
12138 || omp_privatize_by_reference (decl))
12139 src = build_simple_mem_ref_loc (loc, src);
12141 else
12142 src = decl;
12143 dst = build_simple_mem_ref_loc (loc, arg);
12144 dst = omp_build_component_ref (dst, f);
12145 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
12146 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12147 else
12149 if (ctx->allocate_map)
12150 if (tree *allocatorp = ctx->allocate_map->get (decl))
12152 tree allocator = *allocatorp;
12153 HOST_WIDE_INT ialign = 0;
12154 if (TREE_CODE (allocator) == TREE_LIST)
12156 ialign = tree_to_uhwi (TREE_VALUE (allocator));
12157 allocator = TREE_PURPOSE (allocator);
12159 if (TREE_CODE (allocator) != INTEGER_CST)
12161 n = splay_tree_lookup (ctx->sfield_map,
12162 (splay_tree_key) allocator);
12163 allocator = (tree) n->value;
12164 if (tcctx.cb.decl_map)
12165 allocator = *tcctx.cb.decl_map->get (allocator);
12166 tree a = build_simple_mem_ref_loc (loc, sarg);
12167 allocator = omp_build_component_ref (a, allocator);
12169 allocator = fold_convert (pointer_sized_int_node, allocator);
12170 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
12171 tree align = build_int_cst (size_type_node,
12172 MAX (ialign,
12173 DECL_ALIGN_UNIT (decl)));
12174 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
12175 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
12176 allocator);
12177 ptr = fold_convert (TREE_TYPE (dst), ptr);
12178 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
12179 append_to_statement_list (t, &list);
12180 dst = build_simple_mem_ref_loc (loc, dst);
12182 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12184 append_to_statement_list (t, &list);
12185 break;
12186 case OMP_CLAUSE_PRIVATE:
12187 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
12188 break;
12189 decl = OMP_CLAUSE_DECL (c);
12190 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12191 f = (tree) n->value;
12192 if (tcctx.cb.decl_map)
12193 f = *tcctx.cb.decl_map->get (f);
12194 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12195 if (n != NULL)
12197 sf = (tree) n->value;
12198 if (tcctx.cb.decl_map)
12199 sf = *tcctx.cb.decl_map->get (sf);
12200 src = build_simple_mem_ref_loc (loc, sarg);
12201 src = omp_build_component_ref (src, sf);
12202 if (use_pointer_for_field (decl, NULL))
12203 src = build_simple_mem_ref_loc (loc, src);
12205 else
12206 src = decl;
12207 dst = build_simple_mem_ref_loc (loc, arg);
12208 dst = omp_build_component_ref (dst, f);
12209 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12210 append_to_statement_list (t, &list);
12211 break;
12212 default:
12213 break;
12216 /* Last pass: handle VLA firstprivates. */
12217 if (tcctx.cb.decl_map)
12218 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12219 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12221 tree ind, ptr, df;
12223 decl = OMP_CLAUSE_DECL (c);
12224 if (!is_variable_sized (decl))
12225 continue;
12226 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12227 if (n == NULL)
12228 continue;
12229 f = (tree) n->value;
12230 f = *tcctx.cb.decl_map->get (f);
12231 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
12232 ind = DECL_VALUE_EXPR (decl);
12233 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
12234 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
12235 n = splay_tree_lookup (ctx->sfield_map,
12236 (splay_tree_key) TREE_OPERAND (ind, 0));
12237 sf = (tree) n->value;
12238 sf = *tcctx.cb.decl_map->get (sf);
12239 src = build_simple_mem_ref_loc (loc, sarg);
12240 src = omp_build_component_ref (src, sf);
12241 src = build_simple_mem_ref_loc (loc, src);
12242 dst = build_simple_mem_ref_loc (loc, arg);
12243 dst = omp_build_component_ref (dst, f);
12244 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12245 append_to_statement_list (t, &list);
12246 n = splay_tree_lookup (ctx->field_map,
12247 (splay_tree_key) TREE_OPERAND (ind, 0));
12248 df = (tree) n->value;
12249 df = *tcctx.cb.decl_map->get (df);
12250 ptr = build_simple_mem_ref_loc (loc, arg);
12251 ptr = omp_build_component_ref (ptr, df);
12252 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
12253 build_fold_addr_expr_loc (loc, dst));
12254 append_to_statement_list (t, &list);
12257 t = build1 (RETURN_EXPR, void_type_node, NULL);
12258 append_to_statement_list (t, &list);
12260 if (tcctx.cb.decl_map)
12261 delete tcctx.cb.decl_map;
12262 pop_gimplify_context (NULL);
12263 BIND_EXPR_BODY (bind) = list;
12264 pop_cfun ();
12267 static void
12268 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
12270 tree c, clauses;
12271 gimple *g;
12272 size_t cnt[5] = { 0, 0, 0, 0, 0 }, idx = 2, i;
12274 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
12275 gcc_assert (clauses);
12276 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12277 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
12278 switch (OMP_CLAUSE_DEPEND_KIND (c))
12280 case OMP_CLAUSE_DEPEND_LAST:
12281 /* Lowering already done at gimplification. */
12282 return;
12283 case OMP_CLAUSE_DEPEND_IN:
12284 cnt[2]++;
12285 break;
12286 case OMP_CLAUSE_DEPEND_OUT:
12287 case OMP_CLAUSE_DEPEND_INOUT:
12288 cnt[0]++;
12289 break;
12290 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12291 cnt[1]++;
12292 break;
12293 case OMP_CLAUSE_DEPEND_DEPOBJ:
12294 cnt[3]++;
12295 break;
12296 case OMP_CLAUSE_DEPEND_INOUTSET:
12297 cnt[4]++;
12298 break;
12299 default:
12300 gcc_unreachable ();
12302 if (cnt[1] || cnt[3] || cnt[4])
12303 idx = 5;
12304 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3] + cnt[4];
12305 size_t inoutidx = total + idx;
12306 tree type = build_array_type_nelts (ptr_type_node, total + idx + 2 * cnt[4]);
12307 tree array = create_tmp_var (type);
12308 TREE_ADDRESSABLE (array) = 1;
12309 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12310 NULL_TREE);
12311 if (idx == 5)
12313 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12314 gimple_seq_add_stmt (iseq, g);
12315 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12316 NULL_TREE);
12318 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12319 gimple_seq_add_stmt (iseq, g);
12320 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12322 r = build4 (ARRAY_REF, ptr_type_node, array,
12323 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12324 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12325 gimple_seq_add_stmt (iseq, g);
12327 for (i = 0; i < 5; i++)
12329 if (cnt[i] == 0)
12330 continue;
12331 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12332 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12333 continue;
12334 else
12336 switch (OMP_CLAUSE_DEPEND_KIND (c))
12338 case OMP_CLAUSE_DEPEND_IN:
12339 if (i != 2)
12340 continue;
12341 break;
12342 case OMP_CLAUSE_DEPEND_OUT:
12343 case OMP_CLAUSE_DEPEND_INOUT:
12344 if (i != 0)
12345 continue;
12346 break;
12347 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12348 if (i != 1)
12349 continue;
12350 break;
12351 case OMP_CLAUSE_DEPEND_DEPOBJ:
12352 if (i != 3)
12353 continue;
12354 break;
12355 case OMP_CLAUSE_DEPEND_INOUTSET:
12356 if (i != 4)
12357 continue;
12358 break;
12359 default:
12360 gcc_unreachable ();
12362 tree t = OMP_CLAUSE_DECL (c);
12363 if (i == 4)
12365 t = build4 (ARRAY_REF, ptr_type_node, array,
12366 size_int (inoutidx), NULL_TREE, NULL_TREE);
12367 t = build_fold_addr_expr (t);
12368 inoutidx += 2;
12370 t = fold_convert (ptr_type_node, t);
12371 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12372 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12373 NULL_TREE, NULL_TREE);
12374 g = gimple_build_assign (r, t);
12375 gimple_seq_add_stmt (iseq, g);
12378 if (cnt[4])
12379 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12380 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12381 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_INOUTSET)
12383 tree t = OMP_CLAUSE_DECL (c);
12384 t = fold_convert (ptr_type_node, t);
12385 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12386 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12387 NULL_TREE, NULL_TREE);
12388 g = gimple_build_assign (r, t);
12389 gimple_seq_add_stmt (iseq, g);
12390 t = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
12391 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12392 NULL_TREE, NULL_TREE);
12393 g = gimple_build_assign (r, t);
12394 gimple_seq_add_stmt (iseq, g);
12397 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12398 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12399 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12400 OMP_CLAUSE_CHAIN (c) = *pclauses;
12401 *pclauses = c;
12402 tree clobber = build_clobber (type);
12403 g = gimple_build_assign (array, clobber);
12404 gimple_seq_add_stmt (oseq, g);
12407 /* Lower the OpenMP parallel or task directive in the current statement
12408 in GSI_P. CTX holds context information for the directive. */
12410 static void
12411 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12413 tree clauses;
12414 tree child_fn, t;
12415 gimple *stmt = gsi_stmt (*gsi_p);
12416 gbind *par_bind, *bind, *dep_bind = NULL;
12417 gimple_seq par_body;
12418 location_t loc = gimple_location (stmt);
12420 clauses = gimple_omp_taskreg_clauses (stmt);
12421 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12422 && gimple_omp_task_taskwait_p (stmt))
12424 par_bind = NULL;
12425 par_body = NULL;
12427 else
12429 par_bind
12430 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
12431 par_body = gimple_bind_body (par_bind);
12433 child_fn = ctx->cb.dst_fn;
12434 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
12435 && !gimple_omp_parallel_combined_p (stmt))
12437 struct walk_stmt_info wi;
12438 int ws_num = 0;
12440 memset (&wi, 0, sizeof (wi));
12441 wi.info = &ws_num;
12442 wi.val_only = true;
12443 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12444 if (ws_num == 1)
12445 gimple_omp_parallel_set_combined_p (stmt, true);
12447 gimple_seq dep_ilist = NULL;
12448 gimple_seq dep_olist = NULL;
12449 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12450 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12452 push_gimplify_context ();
12453 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12454 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
12455 &dep_ilist, &dep_olist);
12458 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12459 && gimple_omp_task_taskwait_p (stmt))
12461 if (dep_bind)
12463 gsi_replace (gsi_p, dep_bind, true);
12464 gimple_bind_add_seq (dep_bind, dep_ilist);
12465 gimple_bind_add_stmt (dep_bind, stmt);
12466 gimple_bind_add_seq (dep_bind, dep_olist);
12467 pop_gimplify_context (dep_bind);
12469 return;
12472 if (ctx->srecord_type)
12473 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
12475 gimple_seq tskred_ilist = NULL;
12476 gimple_seq tskred_olist = NULL;
12477 if ((is_task_ctx (ctx)
12478 && gimple_omp_task_taskloop_p (ctx->stmt)
12479 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
12480 OMP_CLAUSE_REDUCTION))
12481 || (is_parallel_ctx (ctx)
12482 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
12483 OMP_CLAUSE__REDUCTEMP_)))
12485 if (dep_bind == NULL)
12487 push_gimplify_context ();
12488 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12490 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
12491 : OMP_PARALLEL,
12492 gimple_omp_taskreg_clauses (ctx->stmt),
12493 &tskred_ilist, &tskred_olist);
12496 push_gimplify_context ();
12498 gimple_seq par_olist = NULL;
12499 gimple_seq par_ilist = NULL;
12500 gimple_seq par_rlist = NULL;
12501 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
12502 lower_omp (&par_body, ctx);
12503 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
12504 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
12506 /* Declare all the variables created by mapping and the variables
12507 declared in the scope of the parallel body. */
12508 record_vars_into (ctx->block_vars, child_fn);
12509 maybe_remove_omp_member_access_dummy_vars (par_bind);
12510 record_vars_into (gimple_bind_vars (par_bind), child_fn);
12512 if (ctx->record_type)
12514 ctx->sender_decl
12515 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12516 : ctx->record_type, ".omp_data_o");
12517 DECL_NAMELESS (ctx->sender_decl) = 1;
12518 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12519 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
12522 gimple_seq olist = NULL;
12523 gimple_seq ilist = NULL;
12524 lower_send_clauses (clauses, &ilist, &olist, ctx);
12525 lower_send_shared_vars (&ilist, &olist, ctx);
12527 if (ctx->record_type)
12529 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12530 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12531 clobber));
12534 /* Once all the expansions are done, sequence all the different
12535 fragments inside gimple_omp_body. */
12537 gimple_seq new_body = NULL;
12539 if (ctx->record_type)
12541 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12542 /* fixup_child_record_type might have changed receiver_decl's type. */
12543 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12544 gimple_seq_add_stmt (&new_body,
12545 gimple_build_assign (ctx->receiver_decl, t));
12548 gimple_seq_add_seq (&new_body, par_ilist);
12549 gimple_seq_add_seq (&new_body, par_body);
12550 gimple_seq_add_seq (&new_body, par_rlist);
12551 if (ctx->cancellable)
12552 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
12553 gimple_seq_add_seq (&new_body, par_olist);
12554 new_body = maybe_catch_exception (new_body);
12555 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
12556 gimple_seq_add_stmt (&new_body,
12557 gimple_build_omp_continue (integer_zero_node,
12558 integer_zero_node));
12559 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12560 gimple_omp_set_body (stmt, new_body);
12562 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
12563 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12564 else
12565 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
12566 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12567 gimple_bind_add_seq (bind, ilist);
12568 gimple_bind_add_stmt (bind, stmt);
12569 gimple_bind_add_seq (bind, olist);
12571 pop_gimplify_context (NULL);
12573 if (dep_bind)
12575 gimple_bind_add_seq (dep_bind, dep_ilist);
12576 gimple_bind_add_seq (dep_bind, tskred_ilist);
12577 gimple_bind_add_stmt (dep_bind, bind);
12578 gimple_bind_add_seq (dep_bind, tskred_olist);
12579 gimple_bind_add_seq (dep_bind, dep_olist);
12580 pop_gimplify_context (dep_bind);
12584 /* Lower the GIMPLE_OMP_TARGET in the current statement
12585 in GSI_P. CTX holds context information for the directive. */
12587 static void
12588 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12590 tree clauses;
12591 tree child_fn, t, c;
12592 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12593 gbind *tgt_bind, *bind, *dep_bind = NULL;
12594 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12595 location_t loc = gimple_location (stmt);
12596 bool offloaded, data_region;
12597 unsigned int map_cnt = 0;
12598 tree in_reduction_clauses = NULL_TREE;
12600 offloaded = is_gimple_omp_offloaded (stmt);
12601 switch (gimple_omp_target_kind (stmt))
12603 case GF_OMP_TARGET_KIND_REGION:
12604 tree *p, *q;
12605 q = &in_reduction_clauses;
12606 for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
12607 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12609 *q = *p;
12610 q = &OMP_CLAUSE_CHAIN (*q);
12611 *p = OMP_CLAUSE_CHAIN (*p);
12613 else
12614 p = &OMP_CLAUSE_CHAIN (*p);
12615 *q = NULL_TREE;
12616 *p = in_reduction_clauses;
12617 /* FALLTHRU */
12618 case GF_OMP_TARGET_KIND_UPDATE:
12619 case GF_OMP_TARGET_KIND_ENTER_DATA:
12620 case GF_OMP_TARGET_KIND_EXIT_DATA:
12621 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12622 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12623 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12624 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12625 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12626 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12627 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12628 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12629 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12630 data_region = false;
12631 break;
12632 case GF_OMP_TARGET_KIND_DATA:
12633 case GF_OMP_TARGET_KIND_OACC_DATA:
12634 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12635 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12636 data_region = true;
12637 break;
12638 default:
12639 gcc_unreachable ();
12642 /* Ensure that requires map is written via output_offload_tables, even if only
12643 'target (enter/exit) data' is used in the translation unit. */
12644 if (ENABLE_OFFLOADING && (omp_requires_mask & OMP_REQUIRES_TARGET_USED))
12645 g->have_offload = true;
12647 clauses = gimple_omp_target_clauses (stmt);
12649 gimple_seq dep_ilist = NULL;
12650 gimple_seq dep_olist = NULL;
12651 bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
12652 if (has_depend || in_reduction_clauses)
12654 push_gimplify_context ();
12655 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12656 if (has_depend)
12657 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12658 &dep_ilist, &dep_olist);
12659 if (in_reduction_clauses)
12660 lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
12661 ctx, NULL);
12664 tgt_bind = NULL;
12665 tgt_body = NULL;
12666 if (offloaded)
12668 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12669 tgt_body = gimple_bind_body (tgt_bind);
12671 else if (data_region)
12672 tgt_body = gimple_omp_body (stmt);
12673 child_fn = ctx->cb.dst_fn;
12675 push_gimplify_context ();
12676 fplist = NULL;
12678 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12679 switch (OMP_CLAUSE_CODE (c))
12681 tree var, x;
12683 default:
12684 break;
12685 case OMP_CLAUSE_MAP:
12686 #if CHECKING_P
12687 /* First check what we're prepared to handle in the following. */
12688 switch (OMP_CLAUSE_MAP_KIND (c))
12690 case GOMP_MAP_ALLOC:
12691 case GOMP_MAP_TO:
12692 case GOMP_MAP_FROM:
12693 case GOMP_MAP_TOFROM:
12694 case GOMP_MAP_POINTER:
12695 case GOMP_MAP_TO_PSET:
12696 case GOMP_MAP_DELETE:
12697 case GOMP_MAP_RELEASE:
12698 case GOMP_MAP_ALWAYS_TO:
12699 case GOMP_MAP_ALWAYS_FROM:
12700 case GOMP_MAP_ALWAYS_TOFROM:
12701 case GOMP_MAP_FORCE_PRESENT:
12702 case GOMP_MAP_ALWAYS_PRESENT_FROM:
12703 case GOMP_MAP_ALWAYS_PRESENT_TO:
12704 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
12706 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12707 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12708 case GOMP_MAP_STRUCT:
12709 case GOMP_MAP_ALWAYS_POINTER:
12710 case GOMP_MAP_ATTACH:
12711 case GOMP_MAP_DETACH:
12712 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
12713 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
12714 break;
12715 case GOMP_MAP_IF_PRESENT:
12716 case GOMP_MAP_FORCE_ALLOC:
12717 case GOMP_MAP_FORCE_TO:
12718 case GOMP_MAP_FORCE_FROM:
12719 case GOMP_MAP_FORCE_TOFROM:
12720 case GOMP_MAP_FORCE_DEVICEPTR:
12721 case GOMP_MAP_DEVICE_RESIDENT:
12722 case GOMP_MAP_LINK:
12723 case GOMP_MAP_FORCE_DETACH:
12724 gcc_assert (is_gimple_omp_oacc (stmt));
12725 break;
12726 default:
12727 gcc_unreachable ();
12729 #endif
12730 /* FALLTHRU */
12731 case OMP_CLAUSE_TO:
12732 case OMP_CLAUSE_FROM:
12733 oacc_firstprivate:
12734 var = OMP_CLAUSE_DECL (c);
12735 if (!DECL_P (var))
12737 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12738 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12739 && (OMP_CLAUSE_MAP_KIND (c)
12740 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12741 map_cnt++;
12742 continue;
12745 if (DECL_SIZE (var)
12746 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12748 tree var2 = DECL_VALUE_EXPR (var);
12749 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12750 var2 = TREE_OPERAND (var2, 0);
12751 gcc_assert (DECL_P (var2));
12752 var = var2;
12755 if (offloaded
12756 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12757 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12758 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12760 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12762 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12763 && varpool_node::get_create (var)->offloadable)
12764 continue;
12766 tree type = build_pointer_type (TREE_TYPE (var));
12767 tree new_var = lookup_decl (var, ctx);
12768 x = create_tmp_var_raw (type, get_name (new_var));
12769 gimple_add_tmp_var (x);
12770 x = build_simple_mem_ref (x);
12771 SET_DECL_VALUE_EXPR (new_var, x);
12772 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12774 continue;
12777 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12778 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12779 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12780 && is_omp_target (stmt))
12782 gcc_assert (maybe_lookup_field (c, ctx));
12783 map_cnt++;
12784 continue;
12787 if (!maybe_lookup_field (var, ctx))
12788 continue;
12790 /* Don't remap compute constructs' reduction variables, because the
12791 intermediate result must be local to each gang. */
12792 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12793 && is_gimple_omp_oacc (ctx->stmt)
12794 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12796 x = build_receiver_ref (var, true, ctx);
12797 tree new_var = lookup_decl (var, ctx);
12799 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12800 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12801 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12802 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12803 x = build_simple_mem_ref (x);
12804 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12806 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12807 if (omp_privatize_by_reference (new_var)
12808 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12809 || DECL_BY_REFERENCE (var)))
12811 /* Create a local object to hold the instance
12812 value. */
12813 tree type = TREE_TYPE (TREE_TYPE (new_var));
12814 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12815 tree inst = create_tmp_var (type, id);
12816 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12817 x = build_fold_addr_expr (inst);
12819 gimplify_assign (new_var, x, &fplist);
12821 else if (DECL_P (new_var))
12823 SET_DECL_VALUE_EXPR (new_var, x);
12824 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12826 else
12827 gcc_unreachable ();
12829 map_cnt++;
12830 break;
12832 case OMP_CLAUSE_FIRSTPRIVATE:
12833 omp_firstprivate_recv:
12834 gcc_checking_assert (offloaded);
12835 if (is_gimple_omp_oacc (ctx->stmt))
12837 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12838 gcc_checking_assert (!is_oacc_kernels (ctx));
12839 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12840 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12842 goto oacc_firstprivate;
12844 map_cnt++;
12845 var = OMP_CLAUSE_DECL (c);
12846 if (!omp_privatize_by_reference (var)
12847 && !is_gimple_reg_type (TREE_TYPE (var)))
12849 tree new_var = lookup_decl (var, ctx);
12850 if (is_variable_sized (var))
12852 tree pvar = DECL_VALUE_EXPR (var);
12853 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12854 pvar = TREE_OPERAND (pvar, 0);
12855 gcc_assert (DECL_P (pvar));
12856 tree new_pvar = lookup_decl (pvar, ctx);
12857 x = build_fold_indirect_ref (new_pvar);
12858 TREE_THIS_NOTRAP (x) = 1;
12860 else
12861 x = build_receiver_ref (var, true, ctx);
12862 SET_DECL_VALUE_EXPR (new_var, x);
12863 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12865 /* Fortran array descriptors: firstprivate of data + attach. */
12866 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
12867 && lang_hooks.decls.omp_array_data (var, true))
12868 map_cnt += 2;
12869 break;
12871 case OMP_CLAUSE_PRIVATE:
12872 gcc_checking_assert (offloaded);
12873 if (is_gimple_omp_oacc (ctx->stmt))
12875 /* No 'private' clauses on OpenACC 'kernels'. */
12876 gcc_checking_assert (!is_oacc_kernels (ctx));
12877 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12878 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12880 break;
12882 var = OMP_CLAUSE_DECL (c);
12883 if (is_variable_sized (var))
12885 tree new_var = lookup_decl (var, ctx);
12886 tree pvar = DECL_VALUE_EXPR (var);
12887 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12888 pvar = TREE_OPERAND (pvar, 0);
12889 gcc_assert (DECL_P (pvar));
12890 tree new_pvar = lookup_decl (pvar, ctx);
12891 x = build_fold_indirect_ref (new_pvar);
12892 TREE_THIS_NOTRAP (x) = 1;
12893 SET_DECL_VALUE_EXPR (new_var, x);
12894 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12896 break;
12898 case OMP_CLAUSE_USE_DEVICE_PTR:
12899 case OMP_CLAUSE_USE_DEVICE_ADDR:
12900 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12901 case OMP_CLAUSE_IS_DEVICE_PTR:
12902 var = OMP_CLAUSE_DECL (c);
12903 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
12905 while (TREE_CODE (var) == INDIRECT_REF
12906 || TREE_CODE (var) == ARRAY_REF)
12907 var = TREE_OPERAND (var, 0);
12908 if (lang_hooks.decls.omp_array_data (var, true))
12909 goto omp_firstprivate_recv;
12911 map_cnt++;
12912 if (is_variable_sized (var))
12914 tree new_var = lookup_decl (var, ctx);
12915 tree pvar = DECL_VALUE_EXPR (var);
12916 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12917 pvar = TREE_OPERAND (pvar, 0);
12918 gcc_assert (DECL_P (pvar));
12919 tree new_pvar = lookup_decl (pvar, ctx);
12920 x = build_fold_indirect_ref (new_pvar);
12921 TREE_THIS_NOTRAP (x) = 1;
12922 SET_DECL_VALUE_EXPR (new_var, x);
12923 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12925 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12926 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
12927 && !omp_privatize_by_reference (var)
12928 && !omp_is_allocatable_or_ptr (var)
12929 && !lang_hooks.decls.omp_array_data (var, true))
12930 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12932 tree new_var = lookup_decl (var, ctx);
12933 tree type = build_pointer_type (TREE_TYPE (var));
12934 x = create_tmp_var_raw (type, get_name (new_var));
12935 gimple_add_tmp_var (x);
12936 x = build_simple_mem_ref (x);
12937 SET_DECL_VALUE_EXPR (new_var, x);
12938 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12940 else
12942 tree new_var = lookup_decl (var, ctx);
12943 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12944 gimple_add_tmp_var (x);
12945 SET_DECL_VALUE_EXPR (new_var, x);
12946 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12948 break;
12951 if (offloaded)
12953 target_nesting_level++;
12954 lower_omp (&tgt_body, ctx);
12955 target_nesting_level--;
12957 else if (data_region)
12958 lower_omp (&tgt_body, ctx);
12960 if (offloaded)
12962 /* Declare all the variables created by mapping and the variables
12963 declared in the scope of the target body. */
12964 record_vars_into (ctx->block_vars, child_fn);
12965 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
12966 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
12969 olist = NULL;
12970 ilist = NULL;
12971 if (ctx->record_type)
12973 ctx->sender_decl
12974 = create_tmp_var (ctx->record_type, ".omp_data_arr");
12975 DECL_NAMELESS (ctx->sender_decl) = 1;
12976 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12977 t = make_tree_vec (3);
12978 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
12979 TREE_VEC_ELT (t, 1)
12980 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
12981 ".omp_data_sizes");
12982 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
12983 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
12984 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
12985 tree tkind_type = short_unsigned_type_node;
12986 int talign_shift = 8;
12987 TREE_VEC_ELT (t, 2)
12988 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
12989 ".omp_data_kinds");
12990 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
12991 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
12992 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
12993 gimple_omp_target_set_data_arg (stmt, t);
12995 vec<constructor_elt, va_gc> *vsize;
12996 vec<constructor_elt, va_gc> *vkind;
12997 vec_alloc (vsize, map_cnt);
12998 vec_alloc (vkind, map_cnt);
12999 unsigned int map_idx = 0;
13001 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13002 switch (OMP_CLAUSE_CODE (c))
13004 tree ovar, nc, s, purpose, var, x, type;
13005 unsigned int talign;
13007 default:
13008 break;
13010 case OMP_CLAUSE_MAP:
13011 case OMP_CLAUSE_TO:
13012 case OMP_CLAUSE_FROM:
13013 oacc_firstprivate_map:
13014 nc = c;
13015 ovar = OMP_CLAUSE_DECL (c);
13016 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13017 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13018 || (OMP_CLAUSE_MAP_KIND (c)
13019 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13020 break;
13021 if (!DECL_P (ovar))
13023 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13024 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
13026 nc = OMP_CLAUSE_CHAIN (c);
13027 gcc_checking_assert (OMP_CLAUSE_DECL (nc)
13028 == get_base_address (ovar));
13029 ovar = OMP_CLAUSE_DECL (nc);
13031 else
13033 tree x = build_sender_ref (ovar, ctx);
13034 tree v = ovar;
13035 if (in_reduction_clauses
13036 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13037 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13039 v = unshare_expr (v);
13040 tree *p = &v;
13041 while (handled_component_p (*p)
13042 || TREE_CODE (*p) == INDIRECT_REF
13043 || TREE_CODE (*p) == ADDR_EXPR
13044 || TREE_CODE (*p) == MEM_REF
13045 || TREE_CODE (*p) == NON_LVALUE_EXPR)
13046 p = &TREE_OPERAND (*p, 0);
13047 tree d = *p;
13048 if (is_variable_sized (d))
13050 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13051 d = DECL_VALUE_EXPR (d);
13052 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13053 d = TREE_OPERAND (d, 0);
13054 gcc_assert (DECL_P (d));
13056 splay_tree_key key
13057 = (splay_tree_key) &DECL_CONTEXT (d);
13058 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13059 key)->value;
13060 if (d == *p)
13061 *p = nd;
13062 else
13063 *p = build_fold_indirect_ref (nd);
13065 v = build_fold_addr_expr_with_type (v, ptr_type_node);
13066 gimplify_assign (x, v, &ilist);
13067 nc = NULL_TREE;
13070 else
13072 if (DECL_SIZE (ovar)
13073 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
13075 tree ovar2 = DECL_VALUE_EXPR (ovar);
13076 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
13077 ovar2 = TREE_OPERAND (ovar2, 0);
13078 gcc_assert (DECL_P (ovar2));
13079 ovar = ovar2;
13081 if (!maybe_lookup_field (ovar, ctx)
13082 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13083 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13084 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
13085 continue;
13088 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
13089 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
13090 talign = DECL_ALIGN_UNIT (ovar);
13092 var = NULL_TREE;
13093 if (nc)
13095 if (in_reduction_clauses
13096 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13097 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13099 tree d = ovar;
13100 if (is_variable_sized (d))
13102 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13103 d = DECL_VALUE_EXPR (d);
13104 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13105 d = TREE_OPERAND (d, 0);
13106 gcc_assert (DECL_P (d));
13108 splay_tree_key key
13109 = (splay_tree_key) &DECL_CONTEXT (d);
13110 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13111 key)->value;
13112 if (d == ovar)
13113 var = nd;
13114 else
13115 var = build_fold_indirect_ref (nd);
13117 else
13118 var = lookup_decl_in_outer_ctx (ovar, ctx);
13120 if (nc
13121 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13122 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13123 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
13124 && is_omp_target (stmt))
13126 x = build_sender_ref (c, ctx);
13127 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
13129 else if (nc)
13131 x = build_sender_ref (ovar, ctx);
13133 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13134 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
13135 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
13136 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
13138 gcc_assert (offloaded);
13139 tree avar
13140 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
13141 mark_addressable (avar);
13142 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
13143 talign = DECL_ALIGN_UNIT (avar);
13144 avar = build_fold_addr_expr (avar);
13145 gimplify_assign (x, avar, &ilist);
13147 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13149 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
13150 if (!omp_privatize_by_reference (var))
13152 if (is_gimple_reg (var)
13153 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13154 suppress_warning (var);
13155 var = build_fold_addr_expr (var);
13157 else
13158 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13159 gimplify_assign (x, var, &ilist);
13161 else if (is_gimple_reg (var))
13163 gcc_assert (offloaded);
13164 tree avar = create_tmp_var (TREE_TYPE (var));
13165 mark_addressable (avar);
13166 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
13167 if (GOMP_MAP_COPY_TO_P (map_kind)
13168 || map_kind == GOMP_MAP_POINTER
13169 || map_kind == GOMP_MAP_TO_PSET
13170 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13172 /* If we need to initialize a temporary
13173 with VAR because it is not addressable, and
13174 the variable hasn't been initialized yet, then
13175 we'll get a warning for the store to avar.
13176 Don't warn in that case, the mapping might
13177 be implicit. */
13178 suppress_warning (var, OPT_Wuninitialized);
13179 gimplify_assign (avar, var, &ilist);
13181 avar = build_fold_addr_expr (avar);
13182 gimplify_assign (x, avar, &ilist);
13183 if ((GOMP_MAP_COPY_FROM_P (map_kind)
13184 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13185 && !TYPE_READONLY (TREE_TYPE (var)))
13187 x = unshare_expr (x);
13188 x = build_simple_mem_ref (x);
13189 gimplify_assign (var, x, &olist);
13192 else
13194 /* While MAP is handled explicitly by the FE,
13195 for 'target update', only the identified is passed. */
13196 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
13197 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
13198 && (omp_is_allocatable_or_ptr (var)
13199 && omp_check_optional_argument (var, false)))
13200 var = build_fold_indirect_ref (var);
13201 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
13202 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
13203 || (!omp_is_allocatable_or_ptr (var)
13204 && !omp_check_optional_argument (var, false)))
13205 var = build_fold_addr_expr (var);
13206 gimplify_assign (x, var, &ilist);
13209 s = NULL_TREE;
13210 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13212 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13213 s = TREE_TYPE (ovar);
13214 if (TREE_CODE (s) == REFERENCE_TYPE
13215 || omp_check_optional_argument (ovar, false))
13216 s = TREE_TYPE (s);
13217 s = TYPE_SIZE_UNIT (s);
13219 else
13220 s = OMP_CLAUSE_SIZE (c);
13221 if (s == NULL_TREE)
13222 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13223 s = fold_convert (size_type_node, s);
13224 purpose = size_int (map_idx++);
13225 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13226 if (TREE_CODE (s) != INTEGER_CST)
13227 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13229 unsigned HOST_WIDE_INT tkind, tkind_zero;
13230 switch (OMP_CLAUSE_CODE (c))
13232 case OMP_CLAUSE_MAP:
13233 tkind = OMP_CLAUSE_MAP_KIND (c);
13234 tkind_zero = tkind;
13235 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
13236 switch (tkind)
13238 case GOMP_MAP_ALLOC:
13239 case GOMP_MAP_IF_PRESENT:
13240 case GOMP_MAP_TO:
13241 case GOMP_MAP_FROM:
13242 case GOMP_MAP_TOFROM:
13243 case GOMP_MAP_ALWAYS_TO:
13244 case GOMP_MAP_ALWAYS_FROM:
13245 case GOMP_MAP_ALWAYS_TOFROM:
13246 case GOMP_MAP_ALWAYS_PRESENT_TO:
13247 case GOMP_MAP_ALWAYS_PRESENT_FROM:
13248 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
13249 case GOMP_MAP_RELEASE:
13250 case GOMP_MAP_FORCE_TO:
13251 case GOMP_MAP_FORCE_FROM:
13252 case GOMP_MAP_FORCE_TOFROM:
13253 case GOMP_MAP_FORCE_PRESENT:
13254 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
13255 break;
13256 case GOMP_MAP_DELETE:
13257 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
13258 default:
13259 break;
13261 if (tkind_zero != tkind)
13263 if (integer_zerop (s))
13264 tkind = tkind_zero;
13265 else if (integer_nonzerop (s))
13266 tkind_zero = tkind;
13268 if (tkind_zero == tkind
13269 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c)
13270 && (((tkind & GOMP_MAP_FLAG_SPECIAL_BITS)
13271 & ~GOMP_MAP_IMPLICIT)
13272 == 0))
13274 /* If this is an implicit map, and the GOMP_MAP_IMPLICIT
13275 bits are not interfered by other special bit encodings,
13276 then turn the GOMP_IMPLICIT_BIT flag on for the runtime
13277 to see. */
13278 tkind |= GOMP_MAP_IMPLICIT;
13279 tkind_zero = tkind;
13281 break;
13282 case OMP_CLAUSE_FIRSTPRIVATE:
13283 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13284 tkind = GOMP_MAP_TO;
13285 tkind_zero = tkind;
13286 break;
13287 case OMP_CLAUSE_TO:
13288 tkind
13289 = (OMP_CLAUSE_MOTION_PRESENT (c)
13290 ? GOMP_MAP_ALWAYS_PRESENT_TO : GOMP_MAP_TO);
13291 tkind_zero = tkind;
13292 break;
13293 case OMP_CLAUSE_FROM:
13294 tkind
13295 = (OMP_CLAUSE_MOTION_PRESENT (c)
13296 ? GOMP_MAP_ALWAYS_PRESENT_FROM : GOMP_MAP_FROM);
13297 tkind_zero = tkind;
13298 break;
13299 default:
13300 gcc_unreachable ();
13302 gcc_checking_assert (tkind
13303 < (HOST_WIDE_INT_C (1U) << talign_shift));
13304 gcc_checking_assert (tkind_zero
13305 < (HOST_WIDE_INT_C (1U) << talign_shift));
13306 talign = ceil_log2 (talign);
13307 tkind |= talign << talign_shift;
13308 tkind_zero |= talign << talign_shift;
13309 gcc_checking_assert (tkind
13310 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13311 gcc_checking_assert (tkind_zero
13312 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13313 if (tkind == tkind_zero)
13314 x = build_int_cstu (tkind_type, tkind);
13315 else
13317 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
13318 x = build3 (COND_EXPR, tkind_type,
13319 fold_build2 (EQ_EXPR, boolean_type_node,
13320 unshare_expr (s), size_zero_node),
13321 build_int_cstu (tkind_type, tkind_zero),
13322 build_int_cstu (tkind_type, tkind));
13324 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
13325 if (nc && nc != c)
13326 c = nc;
13327 break;
13329 case OMP_CLAUSE_FIRSTPRIVATE:
13330 omp_has_device_addr_descr:
13331 if (is_gimple_omp_oacc (ctx->stmt))
13332 goto oacc_firstprivate_map;
13333 ovar = OMP_CLAUSE_DECL (c);
13334 if (omp_privatize_by_reference (ovar))
13335 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13336 else
13337 talign = DECL_ALIGN_UNIT (ovar);
13338 var = lookup_decl_in_outer_ctx (ovar, ctx);
13339 x = build_sender_ref (ovar, ctx);
13340 tkind = GOMP_MAP_FIRSTPRIVATE;
13341 type = TREE_TYPE (ovar);
13342 if (omp_privatize_by_reference (ovar))
13343 type = TREE_TYPE (type);
13344 if ((INTEGRAL_TYPE_P (type)
13345 && TYPE_PRECISION (type) <= POINTER_SIZE)
13346 || TREE_CODE (type) == POINTER_TYPE)
13348 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13349 tree t = var;
13350 if (omp_privatize_by_reference (var))
13351 t = build_simple_mem_ref (var);
13352 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13353 suppress_warning (var);
13354 if (TREE_CODE (type) != POINTER_TYPE)
13355 t = fold_convert (pointer_sized_int_node, t);
13356 t = fold_convert (TREE_TYPE (x), t);
13357 gimplify_assign (x, t, &ilist);
13359 else if (omp_privatize_by_reference (var))
13360 gimplify_assign (x, var, &ilist);
13361 else if (is_gimple_reg (var))
13363 tree avar = create_tmp_var (TREE_TYPE (var));
13364 mark_addressable (avar);
13365 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13366 suppress_warning (var);
13367 gimplify_assign (avar, var, &ilist);
13368 avar = build_fold_addr_expr (avar);
13369 gimplify_assign (x, avar, &ilist);
13371 else
13373 var = build_fold_addr_expr (var);
13374 gimplify_assign (x, var, &ilist);
13376 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13377 s = size_int (0);
13378 else if (omp_privatize_by_reference (ovar))
13379 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13380 else
13381 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13382 s = fold_convert (size_type_node, s);
13383 purpose = size_int (map_idx++);
13384 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13385 if (TREE_CODE (s) != INTEGER_CST)
13386 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13388 gcc_checking_assert (tkind
13389 < (HOST_WIDE_INT_C (1U) << talign_shift));
13390 talign = ceil_log2 (talign);
13391 tkind |= talign << talign_shift;
13392 gcc_checking_assert (tkind
13393 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13394 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13395 build_int_cstu (tkind_type, tkind));
13396 /* Fortran array descriptors: firstprivate of data + attach. */
13397 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13398 && lang_hooks.decls.omp_array_data (ovar, true))
13400 tree not_null_lb, null_lb, after_lb;
13401 tree var1, var2, size1, size2;
13402 tree present = omp_check_optional_argument (ovar, true);
13403 if (present)
13405 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13406 not_null_lb = create_artificial_label (clause_loc);
13407 null_lb = create_artificial_label (clause_loc);
13408 after_lb = create_artificial_label (clause_loc);
13409 gimple_seq seq = NULL;
13410 present = force_gimple_operand (present, &seq, true,
13411 NULL_TREE);
13412 gimple_seq_add_seq (&ilist, seq);
13413 gimple_seq_add_stmt (&ilist,
13414 gimple_build_cond_from_tree (present,
13415 not_null_lb, null_lb));
13416 gimple_seq_add_stmt (&ilist,
13417 gimple_build_label (not_null_lb));
13419 var1 = lang_hooks.decls.omp_array_data (var, false);
13420 size1 = lang_hooks.decls.omp_array_size (var, &ilist);
13421 var2 = build_fold_addr_expr (x);
13422 if (!POINTER_TYPE_P (TREE_TYPE (var)))
13423 var = build_fold_addr_expr (var);
13424 size2 = fold_build2 (POINTER_DIFF_EXPR, ssizetype,
13425 build_fold_addr_expr (var1), var);
13426 size2 = fold_convert (sizetype, size2);
13427 if (present)
13429 tree tmp = create_tmp_var (TREE_TYPE (var1));
13430 gimplify_assign (tmp, var1, &ilist);
13431 var1 = tmp;
13432 tmp = create_tmp_var (TREE_TYPE (var2));
13433 gimplify_assign (tmp, var2, &ilist);
13434 var2 = tmp;
13435 tmp = create_tmp_var (TREE_TYPE (size1));
13436 gimplify_assign (tmp, size1, &ilist);
13437 size1 = tmp;
13438 tmp = create_tmp_var (TREE_TYPE (size2));
13439 gimplify_assign (tmp, size2, &ilist);
13440 size2 = tmp;
13441 gimple_seq_add_stmt (&ilist, gimple_build_goto (after_lb));
13442 gimple_seq_add_stmt (&ilist, gimple_build_label (null_lb));
13443 gimplify_assign (var1, null_pointer_node, &ilist);
13444 gimplify_assign (var2, null_pointer_node, &ilist);
13445 gimplify_assign (size1, size_zero_node, &ilist);
13446 gimplify_assign (size2, size_zero_node, &ilist);
13447 gimple_seq_add_stmt (&ilist, gimple_build_label (after_lb));
13449 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13450 gimplify_assign (x, var1, &ilist);
13451 tkind = GOMP_MAP_FIRSTPRIVATE;
13452 talign = DECL_ALIGN_UNIT (ovar);
13453 talign = ceil_log2 (talign);
13454 tkind |= talign << talign_shift;
13455 gcc_checking_assert (tkind
13456 <= tree_to_uhwi (
13457 TYPE_MAX_VALUE (tkind_type)));
13458 purpose = size_int (map_idx++);
13459 CONSTRUCTOR_APPEND_ELT (vsize, purpose, size1);
13460 if (TREE_CODE (size1) != INTEGER_CST)
13461 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13462 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13463 build_int_cstu (tkind_type, tkind));
13464 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13465 gimplify_assign (x, var2, &ilist);
13466 tkind = GOMP_MAP_ATTACH;
13467 purpose = size_int (map_idx++);
13468 CONSTRUCTOR_APPEND_ELT (vsize, purpose, size2);
13469 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13470 build_int_cstu (tkind_type, tkind));
13472 break;
13474 case OMP_CLAUSE_USE_DEVICE_PTR:
13475 case OMP_CLAUSE_USE_DEVICE_ADDR:
13476 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13477 case OMP_CLAUSE_IS_DEVICE_PTR:
13478 ovar = OMP_CLAUSE_DECL (c);
13479 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13481 if (lang_hooks.decls.omp_array_data (ovar, true))
13482 goto omp_has_device_addr_descr;
13483 while (TREE_CODE (ovar) == INDIRECT_REF
13484 || TREE_CODE (ovar) == ARRAY_REF)
13485 ovar = TREE_OPERAND (ovar, 0);
13487 var = lookup_decl_in_outer_ctx (ovar, ctx);
13489 if (lang_hooks.decls.omp_array_data (ovar, true))
13491 tkind = ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13492 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13493 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13494 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13496 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13497 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13499 tkind = GOMP_MAP_USE_DEVICE_PTR;
13500 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13502 else
13504 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13505 x = build_sender_ref (ovar, ctx);
13508 if (is_gimple_omp_oacc (ctx->stmt))
13510 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13512 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13513 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13516 type = TREE_TYPE (ovar);
13517 if (lang_hooks.decls.omp_array_data (ovar, true))
13518 var = lang_hooks.decls.omp_array_data (var, false);
13519 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13520 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13521 && !omp_privatize_by_reference (ovar)
13522 && !omp_is_allocatable_or_ptr (ovar))
13523 || TREE_CODE (type) == ARRAY_TYPE)
13524 var = build_fold_addr_expr (var);
13525 else
13527 if (omp_privatize_by_reference (ovar)
13528 || omp_check_optional_argument (ovar, false)
13529 || omp_is_allocatable_or_ptr (ovar))
13531 type = TREE_TYPE (type);
13532 if (POINTER_TYPE_P (type)
13533 && TREE_CODE (type) != ARRAY_TYPE
13534 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13535 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13536 && !omp_is_allocatable_or_ptr (ovar))
13537 || (omp_privatize_by_reference (ovar)
13538 && omp_is_allocatable_or_ptr (ovar))))
13539 var = build_simple_mem_ref (var);
13540 var = fold_convert (TREE_TYPE (x), var);
13543 tree present;
13544 present = omp_check_optional_argument (ovar, true);
13545 if (present)
13547 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13548 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13549 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13550 tree new_x = unshare_expr (x);
13551 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13552 fb_rvalue);
13553 gcond *cond = gimple_build_cond_from_tree (present,
13554 notnull_label,
13555 null_label);
13556 gimple_seq_add_stmt (&ilist, cond);
13557 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
13558 gimplify_assign (new_x, null_pointer_node, &ilist);
13559 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
13560 gimple_seq_add_stmt (&ilist,
13561 gimple_build_label (notnull_label));
13562 gimplify_assign (x, var, &ilist);
13563 gimple_seq_add_stmt (&ilist,
13564 gimple_build_label (opt_arg_label));
13566 else
13567 gimplify_assign (x, var, &ilist);
13568 s = size_int (0);
13569 purpose = size_int (map_idx++);
13570 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13571 gcc_checking_assert (tkind
13572 < (HOST_WIDE_INT_C (1U) << talign_shift));
13573 gcc_checking_assert (tkind
13574 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13575 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13576 build_int_cstu (tkind_type, tkind));
13577 break;
13580 gcc_assert (map_idx == map_cnt);
13582 DECL_INITIAL (TREE_VEC_ELT (t, 1))
13583 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13584 DECL_INITIAL (TREE_VEC_ELT (t, 2))
13585 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
13586 for (int i = 1; i <= 2; i++)
13587 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
13589 gimple_seq initlist = NULL;
13590 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
13591 TREE_VEC_ELT (t, i)),
13592 &initlist, true, NULL_TREE);
13593 gimple_seq_add_seq (&ilist, initlist);
13595 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
13596 gimple_seq_add_stmt (&olist,
13597 gimple_build_assign (TREE_VEC_ELT (t, i),
13598 clobber));
13600 else if (omp_maybe_offloaded_ctx (ctx->outer))
13602 tree id = get_identifier ("omp declare target");
13603 tree decl = TREE_VEC_ELT (t, i);
13604 DECL_ATTRIBUTES (decl)
13605 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13606 varpool_node *node = varpool_node::get (decl);
13607 if (node)
13609 node->offloadable = 1;
13610 if (ENABLE_OFFLOADING)
13612 g->have_offload = true;
13613 vec_safe_push (offload_vars, t);
13618 tree clobber = build_clobber (ctx->record_type);
13619 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13620 clobber));
13623 /* Once all the expansions are done, sequence all the different
13624 fragments inside gimple_omp_body. */
13626 new_body = NULL;
13628 if (offloaded
13629 && ctx->record_type)
13631 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13632 /* fixup_child_record_type might have changed receiver_decl's type. */
13633 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13634 gimple_seq_add_stmt (&new_body,
13635 gimple_build_assign (ctx->receiver_decl, t));
13637 gimple_seq_add_seq (&new_body, fplist);
13639 if (offloaded || data_region)
13641 tree prev = NULL_TREE;
13642 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13643 switch (OMP_CLAUSE_CODE (c))
13645 tree var, x;
13646 default:
13647 break;
13648 case OMP_CLAUSE_FIRSTPRIVATE:
13649 omp_firstprivatize_data_region:
13650 if (is_gimple_omp_oacc (ctx->stmt))
13651 break;
13652 var = OMP_CLAUSE_DECL (c);
13653 if (omp_privatize_by_reference (var)
13654 || is_gimple_reg_type (TREE_TYPE (var)))
13656 tree new_var = lookup_decl (var, ctx);
13657 tree type;
13658 type = TREE_TYPE (var);
13659 if (omp_privatize_by_reference (var))
13660 type = TREE_TYPE (type);
13661 if ((INTEGRAL_TYPE_P (type)
13662 && TYPE_PRECISION (type) <= POINTER_SIZE)
13663 || TREE_CODE (type) == POINTER_TYPE)
13665 x = build_receiver_ref (var, false, ctx);
13666 if (TREE_CODE (type) != POINTER_TYPE)
13667 x = fold_convert (pointer_sized_int_node, x);
13668 x = fold_convert (type, x);
13669 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13670 fb_rvalue);
13671 if (omp_privatize_by_reference (var))
13673 tree v = create_tmp_var_raw (type, get_name (var));
13674 gimple_add_tmp_var (v);
13675 TREE_ADDRESSABLE (v) = 1;
13676 gimple_seq_add_stmt (&new_body,
13677 gimple_build_assign (v, x));
13678 x = build_fold_addr_expr (v);
13680 gimple_seq_add_stmt (&new_body,
13681 gimple_build_assign (new_var, x));
13683 else
13685 bool by_ref = !omp_privatize_by_reference (var);
13686 x = build_receiver_ref (var, by_ref, ctx);
13687 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13688 fb_rvalue);
13689 gimple_seq_add_stmt (&new_body,
13690 gimple_build_assign (new_var, x));
13693 else if (is_variable_sized (var))
13695 tree pvar = DECL_VALUE_EXPR (var);
13696 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13697 pvar = TREE_OPERAND (pvar, 0);
13698 gcc_assert (DECL_P (pvar));
13699 tree new_var = lookup_decl (pvar, ctx);
13700 x = build_receiver_ref (var, false, ctx);
13701 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13702 gimple_seq_add_stmt (&new_body,
13703 gimple_build_assign (new_var, x));
13705 break;
13706 case OMP_CLAUSE_PRIVATE:
13707 if (is_gimple_omp_oacc (ctx->stmt))
13708 break;
13709 var = OMP_CLAUSE_DECL (c);
13710 if (omp_privatize_by_reference (var))
13712 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13713 tree new_var = lookup_decl (var, ctx);
13714 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13715 if (TREE_CONSTANT (x))
13717 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13718 get_name (var));
13719 gimple_add_tmp_var (x);
13720 TREE_ADDRESSABLE (x) = 1;
13721 x = build_fold_addr_expr_loc (clause_loc, x);
13723 else
13724 break;
13726 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13727 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13728 gimple_seq_add_stmt (&new_body,
13729 gimple_build_assign (new_var, x));
13731 break;
13732 case OMP_CLAUSE_USE_DEVICE_PTR:
13733 case OMP_CLAUSE_USE_DEVICE_ADDR:
13734 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13735 case OMP_CLAUSE_IS_DEVICE_PTR:
13736 tree new_var;
13737 gimple_seq assign_body;
13738 bool is_array_data;
13739 bool do_optional_check;
13740 assign_body = NULL;
13741 do_optional_check = false;
13742 var = OMP_CLAUSE_DECL (c);
13743 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13744 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR && is_array_data)
13745 goto omp_firstprivatize_data_region;
13747 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13748 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13749 x = build_sender_ref (is_array_data
13750 ? (splay_tree_key) &DECL_NAME (var)
13751 : (splay_tree_key) &DECL_UID (var), ctx);
13752 else
13754 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13756 while (TREE_CODE (var) == INDIRECT_REF
13757 || TREE_CODE (var) == ARRAY_REF)
13758 var = TREE_OPERAND (var, 0);
13760 x = build_receiver_ref (var, false, ctx);
13763 if (is_array_data)
13765 bool is_ref = omp_privatize_by_reference (var);
13766 do_optional_check = true;
13767 /* First, we copy the descriptor data from the host; then
13768 we update its data to point to the target address. */
13769 new_var = lookup_decl (var, ctx);
13770 new_var = DECL_VALUE_EXPR (new_var);
13771 tree v = new_var;
13772 tree v2 = var;
13773 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR
13774 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR)
13775 v2 = maybe_lookup_decl_in_outer_ctx (var, ctx);
13777 if (is_ref)
13779 v2 = build_fold_indirect_ref (v2);
13780 v = create_tmp_var_raw (TREE_TYPE (v2), get_name (var));
13781 gimple_add_tmp_var (v);
13782 TREE_ADDRESSABLE (v) = 1;
13783 gimplify_assign (v, v2, &assign_body);
13784 tree rhs = build_fold_addr_expr (v);
13785 gimple_seq_add_stmt (&assign_body,
13786 gimple_build_assign (new_var, rhs));
13788 else
13789 gimplify_assign (new_var, v2, &assign_body);
13791 v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13792 gcc_assert (v2);
13793 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13794 gimple_seq_add_stmt (&assign_body,
13795 gimple_build_assign (v2, x));
13797 else if (is_variable_sized (var))
13799 tree pvar = DECL_VALUE_EXPR (var);
13800 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13801 pvar = TREE_OPERAND (pvar, 0);
13802 gcc_assert (DECL_P (pvar));
13803 new_var = lookup_decl (pvar, ctx);
13804 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13805 gimple_seq_add_stmt (&assign_body,
13806 gimple_build_assign (new_var, x));
13808 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13809 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13810 && !omp_privatize_by_reference (var)
13811 && !omp_is_allocatable_or_ptr (var))
13812 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13814 new_var = lookup_decl (var, ctx);
13815 new_var = DECL_VALUE_EXPR (new_var);
13816 gcc_assert (TREE_CODE (new_var) == MEM_REF);
13817 new_var = TREE_OPERAND (new_var, 0);
13818 gcc_assert (DECL_P (new_var));
13819 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13820 gimple_seq_add_stmt (&assign_body,
13821 gimple_build_assign (new_var, x));
13823 else
13825 tree type = TREE_TYPE (var);
13826 new_var = lookup_decl (var, ctx);
13827 if (omp_privatize_by_reference (var))
13829 type = TREE_TYPE (type);
13830 if (POINTER_TYPE_P (type)
13831 && TREE_CODE (type) != ARRAY_TYPE
13832 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13833 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13834 || (omp_privatize_by_reference (var)
13835 && omp_is_allocatable_or_ptr (var))))
13837 tree v = create_tmp_var_raw (type, get_name (var));
13838 gimple_add_tmp_var (v);
13839 TREE_ADDRESSABLE (v) = 1;
13840 x = fold_convert (type, x);
13841 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13842 fb_rvalue);
13843 gimple_seq_add_stmt (&assign_body,
13844 gimple_build_assign (v, x));
13845 x = build_fold_addr_expr (v);
13846 do_optional_check = true;
13849 new_var = DECL_VALUE_EXPR (new_var);
13850 x = fold_convert (TREE_TYPE (new_var), x);
13851 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13852 gimple_seq_add_stmt (&assign_body,
13853 gimple_build_assign (new_var, x));
13855 tree present;
13856 present = ((do_optional_check
13857 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13858 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13859 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
13860 : NULL_TREE);
13861 if (present)
13863 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13864 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13865 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13866 glabel *null_glabel = gimple_build_label (null_label);
13867 glabel *notnull_glabel = gimple_build_label (notnull_label);
13868 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
13869 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13870 fb_rvalue);
13871 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13872 fb_rvalue);
13873 gcond *cond = gimple_build_cond_from_tree (present,
13874 notnull_label,
13875 null_label);
13876 gimple_seq_add_stmt (&new_body, cond);
13877 gimple_seq_add_stmt (&new_body, null_glabel);
13878 gimplify_assign (new_var, null_pointer_node, &new_body);
13879 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13880 gimple_seq_add_stmt (&new_body, notnull_glabel);
13881 gimple_seq_add_seq (&new_body, assign_body);
13882 gimple_seq_add_stmt (&new_body,
13883 gimple_build_label (opt_arg_label));
13885 else
13886 gimple_seq_add_seq (&new_body, assign_body);
13887 break;
13889 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13890 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13891 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13892 or references to VLAs. */
13893 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13894 switch (OMP_CLAUSE_CODE (c))
13896 tree var;
13897 default:
13898 break;
13899 case OMP_CLAUSE_MAP:
13900 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13901 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13903 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13904 poly_int64 offset = 0;
13905 gcc_assert (prev);
13906 var = OMP_CLAUSE_DECL (c);
13907 if (DECL_P (var)
13908 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
13909 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
13910 ctx))
13911 && varpool_node::get_create (var)->offloadable)
13912 break;
13913 if (TREE_CODE (var) == INDIRECT_REF
13914 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
13915 var = TREE_OPERAND (var, 0);
13916 if (TREE_CODE (var) == COMPONENT_REF)
13918 var = get_addr_base_and_unit_offset (var, &offset);
13919 gcc_assert (var != NULL_TREE && DECL_P (var));
13921 else if (DECL_SIZE (var)
13922 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
13924 tree var2 = DECL_VALUE_EXPR (var);
13925 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
13926 var2 = TREE_OPERAND (var2, 0);
13927 gcc_assert (DECL_P (var2));
13928 var = var2;
13930 tree new_var = lookup_decl (var, ctx), x;
13931 tree type = TREE_TYPE (new_var);
13932 bool is_ref;
13933 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
13934 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13935 == COMPONENT_REF))
13937 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
13938 is_ref = true;
13939 new_var = build2 (MEM_REF, type,
13940 build_fold_addr_expr (new_var),
13941 build_int_cst (build_pointer_type (type),
13942 offset));
13944 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
13946 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
13947 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
13948 new_var = build2 (MEM_REF, type,
13949 build_fold_addr_expr (new_var),
13950 build_int_cst (build_pointer_type (type),
13951 offset));
13953 else
13954 is_ref = omp_privatize_by_reference (var);
13955 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13956 is_ref = false;
13957 bool ref_to_array = false;
13958 bool ref_to_ptr = false;
13959 if (is_ref)
13961 type = TREE_TYPE (type);
13962 if (TREE_CODE (type) == ARRAY_TYPE)
13964 type = build_pointer_type (type);
13965 ref_to_array = true;
13968 else if (TREE_CODE (type) == ARRAY_TYPE)
13970 tree decl2 = DECL_VALUE_EXPR (new_var);
13971 gcc_assert (TREE_CODE (decl2) == MEM_REF);
13972 decl2 = TREE_OPERAND (decl2, 0);
13973 gcc_assert (DECL_P (decl2));
13974 new_var = decl2;
13975 type = TREE_TYPE (new_var);
13977 else if (TREE_CODE (type) == REFERENCE_TYPE
13978 && TREE_CODE (TREE_TYPE (type)) == POINTER_TYPE)
13980 type = TREE_TYPE (type);
13981 ref_to_ptr = true;
13983 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
13984 x = fold_convert_loc (clause_loc, type, x);
13985 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
13987 tree bias = OMP_CLAUSE_SIZE (c);
13988 if (DECL_P (bias))
13989 bias = lookup_decl (bias, ctx);
13990 bias = fold_convert_loc (clause_loc, sizetype, bias);
13991 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
13992 bias);
13993 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
13994 TREE_TYPE (x), x, bias);
13996 if (ref_to_array)
13997 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13998 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13999 if ((is_ref && !ref_to_array)
14000 || ref_to_ptr)
14002 tree t = create_tmp_var_raw (type, get_name (var));
14003 gimple_add_tmp_var (t);
14004 TREE_ADDRESSABLE (t) = 1;
14005 gimple_seq_add_stmt (&new_body,
14006 gimple_build_assign (t, x));
14007 x = build_fold_addr_expr_loc (clause_loc, t);
14009 gimple_seq_add_stmt (&new_body,
14010 gimple_build_assign (new_var, x));
14011 prev = NULL_TREE;
14013 else if (OMP_CLAUSE_CHAIN (c)
14014 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
14015 == OMP_CLAUSE_MAP
14016 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
14017 == GOMP_MAP_FIRSTPRIVATE_POINTER
14018 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
14019 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
14020 prev = c;
14021 break;
14022 case OMP_CLAUSE_PRIVATE:
14023 var = OMP_CLAUSE_DECL (c);
14024 if (is_variable_sized (var))
14026 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14027 tree new_var = lookup_decl (var, ctx);
14028 tree pvar = DECL_VALUE_EXPR (var);
14029 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
14030 pvar = TREE_OPERAND (pvar, 0);
14031 gcc_assert (DECL_P (pvar));
14032 tree new_pvar = lookup_decl (pvar, ctx);
14033 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
14034 tree al = size_int (DECL_ALIGN (var));
14035 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
14036 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
14037 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
14038 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14039 gimple_seq_add_stmt (&new_body,
14040 gimple_build_assign (new_pvar, x));
14042 else if (omp_privatize_by_reference (var)
14043 && !is_gimple_omp_oacc (ctx->stmt))
14045 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14046 tree new_var = lookup_decl (var, ctx);
14047 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
14048 if (TREE_CONSTANT (x))
14049 break;
14050 else
14052 tree atmp
14053 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
14054 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
14055 tree al = size_int (TYPE_ALIGN (rtype));
14056 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
14059 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
14060 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14061 gimple_seq_add_stmt (&new_body,
14062 gimple_build_assign (new_var, x));
14064 break;
14067 gimple_seq fork_seq = NULL;
14068 gimple_seq join_seq = NULL;
14070 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
14072 /* If there are reductions on the offloaded region itself, treat
14073 them as a dummy GANG loop. */
14074 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
14076 gcall *private_marker = lower_oacc_private_marker (ctx);
14078 if (private_marker)
14079 gimple_call_set_arg (private_marker, 2, level);
14081 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
14082 false, NULL, private_marker, NULL, &fork_seq,
14083 &join_seq, ctx);
14086 gimple_seq_add_seq (&new_body, fork_seq);
14087 gimple_seq_add_seq (&new_body, tgt_body);
14088 gimple_seq_add_seq (&new_body, join_seq);
14090 if (offloaded)
14092 new_body = maybe_catch_exception (new_body);
14093 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
14095 gimple_omp_set_body (stmt, new_body);
14098 bind = gimple_build_bind (NULL, NULL,
14099 tgt_bind ? gimple_bind_block (tgt_bind)
14100 : NULL_TREE);
14101 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
14102 gimple_bind_add_seq (bind, ilist);
14103 gimple_bind_add_stmt (bind, stmt);
14104 gimple_bind_add_seq (bind, olist);
14106 pop_gimplify_context (NULL);
14108 if (dep_bind)
14110 gimple_bind_add_seq (dep_bind, dep_ilist);
14111 gimple_bind_add_stmt (dep_bind, bind);
14112 gimple_bind_add_seq (dep_bind, dep_olist);
14113 pop_gimplify_context (dep_bind);
14117 /* Expand code for an OpenMP teams directive. */
14119 static void
14120 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14122 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
14123 push_gimplify_context ();
14125 tree block = make_node (BLOCK);
14126 gbind *bind = gimple_build_bind (NULL, NULL, block);
14127 gsi_replace (gsi_p, bind, true);
14128 gimple_seq bind_body = NULL;
14129 gimple_seq dlist = NULL;
14130 gimple_seq olist = NULL;
14132 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
14133 OMP_CLAUSE_NUM_TEAMS);
14134 tree num_teams_lower = NULL_TREE;
14135 if (num_teams == NULL_TREE)
14136 num_teams = build_int_cst (unsigned_type_node, 0);
14137 else
14139 num_teams_lower = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams);
14140 if (num_teams_lower)
14142 num_teams_lower = fold_convert (unsigned_type_node, num_teams_lower);
14143 gimplify_expr (&num_teams_lower, &bind_body, NULL, is_gimple_val,
14144 fb_rvalue);
14146 num_teams = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams);
14147 num_teams = fold_convert (unsigned_type_node, num_teams);
14148 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
14150 if (num_teams_lower == NULL_TREE)
14151 num_teams_lower = num_teams;
14152 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
14153 OMP_CLAUSE_THREAD_LIMIT);
14154 if (thread_limit == NULL_TREE)
14155 thread_limit = build_int_cst (unsigned_type_node, 0);
14156 else
14158 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
14159 thread_limit = fold_convert (unsigned_type_node, thread_limit);
14160 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
14161 fb_rvalue);
14163 location_t loc = gimple_location (teams_stmt);
14164 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4);
14165 tree rettype = TREE_TYPE (TREE_TYPE (decl));
14166 tree first = create_tmp_var (rettype);
14167 gimple_seq_add_stmt (&bind_body,
14168 gimple_build_assign (first, build_one_cst (rettype)));
14169 tree llabel = create_artificial_label (loc);
14170 gimple_seq_add_stmt (&bind_body, gimple_build_label (llabel));
14171 gimple *call
14172 = gimple_build_call (decl, 4, num_teams_lower, num_teams, thread_limit,
14173 first);
14174 gimple_set_location (call, loc);
14175 tree temp = create_tmp_var (rettype);
14176 gimple_call_set_lhs (call, temp);
14177 gimple_seq_add_stmt (&bind_body, call);
14179 tree tlabel = create_artificial_label (loc);
14180 tree flabel = create_artificial_label (loc);
14181 gimple *cond = gimple_build_cond (NE_EXPR, temp, build_zero_cst (rettype),
14182 tlabel, flabel);
14183 gimple_seq_add_stmt (&bind_body, cond);
14184 gimple_seq_add_stmt (&bind_body, gimple_build_label (tlabel));
14185 gimple_seq_add_stmt (&bind_body,
14186 gimple_build_assign (first, build_zero_cst (rettype)));
14188 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
14189 &bind_body, &dlist, ctx, NULL);
14190 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
14191 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
14192 NULL, ctx);
14193 gimple_seq_add_stmt (&bind_body, teams_stmt);
14195 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
14196 gimple_omp_set_body (teams_stmt, NULL);
14197 gimple_seq_add_seq (&bind_body, olist);
14198 gimple_seq_add_seq (&bind_body, dlist);
14199 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
14200 gimple_seq_add_stmt (&bind_body, gimple_build_goto (llabel));
14201 gimple_seq_add_stmt (&bind_body, gimple_build_label (flabel));
14202 gimple_bind_set_body (bind, bind_body);
14204 pop_gimplify_context (bind);
14206 gimple_bind_append_vars (bind, ctx->block_vars);
14207 BLOCK_VARS (block) = ctx->block_vars;
14208 if (BLOCK_VARS (block))
14209 TREE_USED (block) = 1;
14212 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
14213 regimplified. If DATA is non-NULL, lower_omp_1 is outside
14214 of OMP context, but with make_addressable_vars set. */
14216 static tree
14217 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
14218 void *data)
14220 tree t = *tp;
14222 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
14223 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
14224 && data == NULL
14225 && DECL_HAS_VALUE_EXPR_P (t))
14226 return t;
14228 if (make_addressable_vars
14229 && DECL_P (t)
14230 && bitmap_bit_p (make_addressable_vars, DECL_UID (t)))
14231 return t;
14233 /* If a global variable has been privatized, TREE_CONSTANT on
14234 ADDR_EXPR might be wrong. */
14235 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
14236 recompute_tree_invariant_for_addr_expr (t);
14238 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
14239 return NULL_TREE;
14242 /* Data to be communicated between lower_omp_regimplify_operands and
14243 lower_omp_regimplify_operands_p. */
14245 struct lower_omp_regimplify_operands_data
14247 omp_context *ctx;
14248 vec<tree> *decls;
14251 /* Helper function for lower_omp_regimplify_operands. Find
14252 omp_member_access_dummy_var vars and adjust temporarily their
14253 DECL_VALUE_EXPRs if needed. */
14255 static tree
14256 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
14257 void *data)
14259 tree t = omp_member_access_dummy_var (*tp);
14260 if (t)
14262 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
14263 lower_omp_regimplify_operands_data *ldata
14264 = (lower_omp_regimplify_operands_data *) wi->info;
14265 tree o = maybe_lookup_decl (t, ldata->ctx);
14266 if (o != t)
14268 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
14269 ldata->decls->safe_push (*tp);
14270 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
14271 SET_DECL_VALUE_EXPR (*tp, v);
14274 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
14275 return NULL_TREE;
14278 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
14279 of omp_member_access_dummy_var vars during regimplification. */
14281 static void
14282 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
14283 gimple_stmt_iterator *gsi_p)
14285 auto_vec<tree, 10> decls;
14286 if (ctx)
14288 struct walk_stmt_info wi;
14289 memset (&wi, '\0', sizeof (wi));
14290 struct lower_omp_regimplify_operands_data data;
14291 data.ctx = ctx;
14292 data.decls = &decls;
14293 wi.info = &data;
14294 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
14296 gimple_regimplify_operands (stmt, gsi_p);
14297 while (!decls.is_empty ())
14299 tree t = decls.pop ();
14300 tree v = decls.pop ();
14301 SET_DECL_VALUE_EXPR (t, v);
14305 static void
14306 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14308 gimple *stmt = gsi_stmt (*gsi_p);
14309 struct walk_stmt_info wi;
14310 gcall *call_stmt;
14312 if (gimple_has_location (stmt))
14313 input_location = gimple_location (stmt);
14315 if (make_addressable_vars)
14316 memset (&wi, '\0', sizeof (wi));
14318 /* If we have issued syntax errors, avoid doing any heavy lifting.
14319 Just replace the OMP directives with a NOP to avoid
14320 confusing RTL expansion. */
14321 if (seen_error () && is_gimple_omp (stmt))
14323 gsi_replace (gsi_p, gimple_build_nop (), true);
14324 return;
14327 switch (gimple_code (stmt))
14329 case GIMPLE_COND:
14331 gcond *cond_stmt = as_a <gcond *> (stmt);
14332 if ((ctx || make_addressable_vars)
14333 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
14334 lower_omp_regimplify_p,
14335 ctx ? NULL : &wi, NULL)
14336 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
14337 lower_omp_regimplify_p,
14338 ctx ? NULL : &wi, NULL)))
14339 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
14341 break;
14342 case GIMPLE_CATCH:
14343 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
14344 break;
14345 case GIMPLE_EH_FILTER:
14346 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
14347 break;
14348 case GIMPLE_TRY:
14349 lower_omp (gimple_try_eval_ptr (stmt), ctx);
14350 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
14351 break;
14352 case GIMPLE_ASSUME:
14353 lower_omp (gimple_assume_body_ptr (stmt), ctx);
14354 break;
14355 case GIMPLE_TRANSACTION:
14356 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
14357 ctx);
14358 break;
14359 case GIMPLE_BIND:
14360 if (ctx && is_gimple_omp_oacc (ctx->stmt))
14362 tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
14363 oacc_privatization_scan_decl_chain (ctx, vars);
14365 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
14366 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
14367 break;
14368 case GIMPLE_OMP_PARALLEL:
14369 case GIMPLE_OMP_TASK:
14370 ctx = maybe_lookup_ctx (stmt);
14371 gcc_assert (ctx);
14372 if (ctx->cancellable)
14373 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14374 lower_omp_taskreg (gsi_p, ctx);
14375 break;
14376 case GIMPLE_OMP_FOR:
14377 ctx = maybe_lookup_ctx (stmt);
14378 gcc_assert (ctx);
14379 if (ctx->cancellable)
14380 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14381 lower_omp_for (gsi_p, ctx);
14382 break;
14383 case GIMPLE_OMP_SECTIONS:
14384 ctx = maybe_lookup_ctx (stmt);
14385 gcc_assert (ctx);
14386 if (ctx->cancellable)
14387 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14388 lower_omp_sections (gsi_p, ctx);
14389 break;
14390 case GIMPLE_OMP_SCOPE:
14391 ctx = maybe_lookup_ctx (stmt);
14392 gcc_assert (ctx);
14393 lower_omp_scope (gsi_p, ctx);
14394 break;
14395 case GIMPLE_OMP_SINGLE:
14396 ctx = maybe_lookup_ctx (stmt);
14397 gcc_assert (ctx);
14398 lower_omp_single (gsi_p, ctx);
14399 break;
14400 case GIMPLE_OMP_STRUCTURED_BLOCK:
14401 /* We have already done error checking at this point, so these nodes
14402 can be completely removed and replaced with their body. */
14403 ctx = maybe_lookup_ctx (stmt);
14404 gcc_assert (ctx);
14405 lower_omp (gimple_omp_body_ptr (stmt), ctx);
14406 gsi_replace_with_seq (gsi_p, gimple_omp_body (stmt), true);
14407 break;
14408 case GIMPLE_OMP_MASTER:
14409 case GIMPLE_OMP_MASKED:
14410 ctx = maybe_lookup_ctx (stmt);
14411 gcc_assert (ctx);
14412 lower_omp_master (gsi_p, ctx);
14413 break;
14414 case GIMPLE_OMP_TASKGROUP:
14415 ctx = maybe_lookup_ctx (stmt);
14416 gcc_assert (ctx);
14417 lower_omp_taskgroup (gsi_p, ctx);
14418 break;
14419 case GIMPLE_OMP_ORDERED:
14420 ctx = maybe_lookup_ctx (stmt);
14421 gcc_assert (ctx);
14422 lower_omp_ordered (gsi_p, ctx);
14423 break;
14424 case GIMPLE_OMP_SCAN:
14425 ctx = maybe_lookup_ctx (stmt);
14426 gcc_assert (ctx);
14427 lower_omp_scan (gsi_p, ctx);
14428 break;
14429 case GIMPLE_OMP_CRITICAL:
14430 ctx = maybe_lookup_ctx (stmt);
14431 gcc_assert (ctx);
14432 lower_omp_critical (gsi_p, ctx);
14433 break;
14434 case GIMPLE_OMP_ATOMIC_LOAD:
14435 if ((ctx || make_addressable_vars)
14436 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14437 as_a <gomp_atomic_load *> (stmt)),
14438 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
14439 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14440 break;
14441 case GIMPLE_OMP_TARGET:
14442 ctx = maybe_lookup_ctx (stmt);
14443 gcc_assert (ctx);
14444 lower_omp_target (gsi_p, ctx);
14445 break;
14446 case GIMPLE_OMP_TEAMS:
14447 ctx = maybe_lookup_ctx (stmt);
14448 gcc_assert (ctx);
14449 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
14450 lower_omp_taskreg (gsi_p, ctx);
14451 else
14452 lower_omp_teams (gsi_p, ctx);
14453 break;
14454 case GIMPLE_CALL:
14455 tree fndecl;
14456 call_stmt = as_a <gcall *> (stmt);
14457 fndecl = gimple_call_fndecl (call_stmt);
14458 if (fndecl
14459 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14460 switch (DECL_FUNCTION_CODE (fndecl))
14462 case BUILT_IN_GOMP_BARRIER:
14463 if (ctx == NULL)
14464 break;
14465 /* FALLTHRU */
14466 case BUILT_IN_GOMP_CANCEL:
14467 case BUILT_IN_GOMP_CANCELLATION_POINT:
14468 omp_context *cctx;
14469 cctx = ctx;
14470 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
14471 cctx = cctx->outer;
14472 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
14473 if (!cctx->cancellable)
14475 if (DECL_FUNCTION_CODE (fndecl)
14476 == BUILT_IN_GOMP_CANCELLATION_POINT)
14478 stmt = gimple_build_nop ();
14479 gsi_replace (gsi_p, stmt, false);
14481 break;
14483 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
14485 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
14486 gimple_call_set_fndecl (call_stmt, fndecl);
14487 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
14489 tree lhs;
14490 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
14491 gimple_call_set_lhs (call_stmt, lhs);
14492 tree fallthru_label;
14493 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
14494 gimple *g;
14495 g = gimple_build_label (fallthru_label);
14496 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14497 g = gimple_build_cond (NE_EXPR, lhs,
14498 fold_convert (TREE_TYPE (lhs),
14499 boolean_false_node),
14500 cctx->cancel_label, fallthru_label);
14501 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14502 break;
14503 default:
14504 break;
14506 goto regimplify;
14508 case GIMPLE_ASSIGN:
14509 for (omp_context *up = ctx; up; up = up->outer)
14511 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
14512 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
14513 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
14514 || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
14515 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
14516 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
14517 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
14518 && (gimple_omp_target_kind (up->stmt)
14519 == GF_OMP_TARGET_KIND_DATA)))
14520 continue;
14521 else if (!up->lastprivate_conditional_map)
14522 break;
14523 tree lhs = get_base_address (gimple_assign_lhs (stmt));
14524 if (TREE_CODE (lhs) == MEM_REF
14525 && DECL_P (TREE_OPERAND (lhs, 0))
14526 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
14527 0))) == REFERENCE_TYPE)
14528 lhs = TREE_OPERAND (lhs, 0);
14529 if (DECL_P (lhs))
14530 if (tree *v = up->lastprivate_conditional_map->get (lhs))
14532 tree clauses;
14533 if (up->combined_into_simd_safelen1)
14535 up = up->outer;
14536 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
14537 up = up->outer;
14539 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
14540 clauses = gimple_omp_for_clauses (up->stmt);
14541 else
14542 clauses = gimple_omp_sections_clauses (up->stmt);
14543 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
14544 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
14545 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14546 OMP_CLAUSE__CONDTEMP_);
14547 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
14548 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
14549 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14552 /* FALLTHRU */
14554 default:
14555 regimplify:
14556 if ((ctx || make_addressable_vars)
14557 && walk_gimple_op (stmt, lower_omp_regimplify_p,
14558 ctx ? NULL : &wi))
14560 /* Just remove clobbers, this should happen only if we have
14561 "privatized" local addressable variables in SIMD regions,
14562 the clobber isn't needed in that case and gimplifying address
14563 of the ARRAY_REF into a pointer and creating MEM_REF based
14564 clobber would create worse code than we get with the clobber
14565 dropped. */
14566 if (gimple_clobber_p (stmt))
14568 gsi_replace (gsi_p, gimple_build_nop (), true);
14569 break;
14571 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14573 break;
14577 static void
14578 lower_omp (gimple_seq *body, omp_context *ctx)
14580 location_t saved_location = input_location;
14581 gimple_stmt_iterator gsi;
14582 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14583 lower_omp_1 (&gsi, ctx);
14584 /* During gimplification, we haven't folded statments inside offloading
14585 or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now. */
14586 if (target_nesting_level || taskreg_nesting_level)
14587 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14588 fold_stmt (&gsi);
14589 input_location = saved_location;
14592 /* Main entry point. */
14594 static unsigned int
14595 execute_lower_omp (void)
14597 gimple_seq body;
14598 int i;
14599 omp_context *ctx;
14601 /* This pass always runs, to provide PROP_gimple_lomp.
14602 But often, there is nothing to do. */
14603 if (flag_openacc == 0 && flag_openmp == 0
14604 && flag_openmp_simd == 0)
14605 return 0;
14607 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
14608 delete_omp_context);
14610 body = gimple_body (current_function_decl);
14612 scan_omp (&body, NULL);
14613 gcc_assert (taskreg_nesting_level == 0);
14614 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
14615 finish_taskreg_scan (ctx);
14616 taskreg_contexts.release ();
14618 if (all_contexts->root)
14620 if (make_addressable_vars)
14621 push_gimplify_context ();
14622 lower_omp (&body, NULL);
14623 if (make_addressable_vars)
14624 pop_gimplify_context (NULL);
14627 if (all_contexts)
14629 splay_tree_delete (all_contexts);
14630 all_contexts = NULL;
14632 BITMAP_FREE (make_addressable_vars);
14633 BITMAP_FREE (global_nonaddressable_vars);
14635 /* If current function is a method, remove artificial dummy VAR_DECL created
14636 for non-static data member privatization, they aren't needed for
14637 debuginfo nor anything else, have been already replaced everywhere in the
14638 IL and cause problems with LTO. */
14639 if (DECL_ARGUMENTS (current_function_decl)
14640 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
14641 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
14642 == POINTER_TYPE))
14643 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
14645 for (auto task_stmt : task_cpyfns)
14646 finalize_task_copyfn (task_stmt);
14647 task_cpyfns.release ();
14648 return 0;
14651 namespace {
14653 const pass_data pass_data_lower_omp =
14655 GIMPLE_PASS, /* type */
14656 "omplower", /* name */
14657 OPTGROUP_OMP, /* optinfo_flags */
14658 TV_NONE, /* tv_id */
14659 PROP_gimple_any, /* properties_required */
14660 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
14661 0, /* properties_destroyed */
14662 0, /* todo_flags_start */
14663 0, /* todo_flags_finish */
14666 class pass_lower_omp : public gimple_opt_pass
14668 public:
14669 pass_lower_omp (gcc::context *ctxt)
14670 : gimple_opt_pass (pass_data_lower_omp, ctxt)
14673 /* opt_pass methods: */
14674 unsigned int execute (function *) final override
14676 return execute_lower_omp ();
14679 }; // class pass_lower_omp
14681 } // anon namespace
14683 gimple_opt_pass *
14684 make_pass_lower_omp (gcc::context *ctxt)
14686 return new pass_lower_omp (ctxt);
14689 /* The following is a utility to diagnose structured block violations.
14690 It is not part of the "omplower" pass, as that's invoked too late. It
14691 should be invoked by the respective front ends after gimplification. */
14693 static splay_tree all_labels;
14695 /* Check for mismatched contexts and generate an error if needed. Return
14696 true if an error is detected. */
14698 static bool
14699 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
14700 gimple *branch_ctx, gimple *label_ctx)
14702 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
14703 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
14705 if (label_ctx == branch_ctx)
14706 return false;
14708 const char* kind = NULL;
14710 if (flag_openacc)
14712 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
14713 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
14715 gcc_checking_assert (kind == NULL);
14716 kind = "OpenACC";
14719 if (kind == NULL)
14721 gcc_checking_assert (flag_openmp || flag_openmp_simd);
14722 kind = "OpenMP";
14725 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14726 so we could traverse it and issue a correct "exit" or "enter" error
14727 message upon a structured block violation.
14729 We built the context by building a list with tree_cons'ing, but there is
14730 no easy counterpart in gimple tuples. It seems like far too much work
14731 for issuing exit/enter error messages. If someone really misses the
14732 distinct error message... patches welcome. */
14734 #if 0
14735 /* Try to avoid confusing the user by producing and error message
14736 with correct "exit" or "enter" verbiage. We prefer "exit"
14737 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14738 if (branch_ctx == NULL)
14739 exit_p = false;
14740 else
14742 while (label_ctx)
14744 if (TREE_VALUE (label_ctx) == branch_ctx)
14746 exit_p = false;
14747 break;
14749 label_ctx = TREE_CHAIN (label_ctx);
14753 if (exit_p)
14754 error ("invalid exit from %s structured block", kind);
14755 else
14756 error ("invalid entry to %s structured block", kind);
14757 #endif
14759 /* If it's obvious we have an invalid entry, be specific about the error. */
14760 if (branch_ctx == NULL)
14761 error ("invalid entry to %s structured block", kind);
14762 else
14764 /* Otherwise, be vague and lazy, but efficient. */
14765 error ("invalid branch to/from %s structured block", kind);
14768 gsi_replace (gsi_p, gimple_build_nop (), false);
14769 return true;
14772 /* Pass 1: Create a minimal tree of structured blocks, and record
14773 where each label is found. */
14775 static tree
14776 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14777 struct walk_stmt_info *wi)
14779 gimple *context = (gimple *) wi->info;
14780 gimple *inner_context;
14781 gimple *stmt = gsi_stmt (*gsi_p);
14783 *handled_ops_p = true;
14785 switch (gimple_code (stmt))
14787 WALK_SUBSTMTS;
14789 case GIMPLE_OMP_PARALLEL:
14790 case GIMPLE_OMP_TASK:
14791 case GIMPLE_OMP_SCOPE:
14792 case GIMPLE_OMP_SECTIONS:
14793 case GIMPLE_OMP_SINGLE:
14794 case GIMPLE_OMP_SECTION:
14795 case GIMPLE_OMP_STRUCTURED_BLOCK:
14796 case GIMPLE_OMP_MASTER:
14797 case GIMPLE_OMP_MASKED:
14798 case GIMPLE_OMP_ORDERED:
14799 case GIMPLE_OMP_SCAN:
14800 case GIMPLE_OMP_CRITICAL:
14801 case GIMPLE_OMP_TARGET:
14802 case GIMPLE_OMP_TEAMS:
14803 case GIMPLE_OMP_TASKGROUP:
14804 /* The minimal context here is just the current OMP construct. */
14805 inner_context = stmt;
14806 wi->info = inner_context;
14807 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14808 wi->info = context;
14809 break;
14811 case GIMPLE_OMP_FOR:
14812 inner_context = stmt;
14813 wi->info = inner_context;
14814 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14815 walk them. */
14816 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
14817 diagnose_sb_1, NULL, wi);
14818 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14819 wi->info = context;
14820 break;
14822 case GIMPLE_LABEL:
14823 splay_tree_insert (all_labels,
14824 (splay_tree_key) gimple_label_label (
14825 as_a <glabel *> (stmt)),
14826 (splay_tree_value) context);
14827 break;
14829 default:
14830 break;
14833 return NULL_TREE;
14836 /* Pass 2: Check each branch and see if its context differs from that of
14837 the destination label's context. */
14839 static tree
14840 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14841 struct walk_stmt_info *wi)
14843 gimple *context = (gimple *) wi->info;
14844 splay_tree_node n;
14845 gimple *stmt = gsi_stmt (*gsi_p);
14847 *handled_ops_p = true;
14849 switch (gimple_code (stmt))
14851 WALK_SUBSTMTS;
14853 case GIMPLE_OMP_PARALLEL:
14854 case GIMPLE_OMP_TASK:
14855 case GIMPLE_OMP_SCOPE:
14856 case GIMPLE_OMP_SECTIONS:
14857 case GIMPLE_OMP_SINGLE:
14858 case GIMPLE_OMP_SECTION:
14859 case GIMPLE_OMP_STRUCTURED_BLOCK:
14860 case GIMPLE_OMP_MASTER:
14861 case GIMPLE_OMP_MASKED:
14862 case GIMPLE_OMP_ORDERED:
14863 case GIMPLE_OMP_SCAN:
14864 case GIMPLE_OMP_CRITICAL:
14865 case GIMPLE_OMP_TARGET:
14866 case GIMPLE_OMP_TEAMS:
14867 case GIMPLE_OMP_TASKGROUP:
14868 wi->info = stmt;
14869 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14870 wi->info = context;
14871 break;
14873 case GIMPLE_OMP_FOR:
14874 wi->info = stmt;
14875 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14876 walk them. */
14877 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
14878 diagnose_sb_2, NULL, wi);
14879 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14880 wi->info = context;
14881 break;
14883 case GIMPLE_COND:
14885 gcond *cond_stmt = as_a <gcond *> (stmt);
14886 tree lab = gimple_cond_true_label (cond_stmt);
14887 if (lab)
14889 n = splay_tree_lookup (all_labels,
14890 (splay_tree_key) lab);
14891 diagnose_sb_0 (gsi_p, context,
14892 n ? (gimple *) n->value : NULL);
14894 lab = gimple_cond_false_label (cond_stmt);
14895 if (lab)
14897 n = splay_tree_lookup (all_labels,
14898 (splay_tree_key) lab);
14899 diagnose_sb_0 (gsi_p, context,
14900 n ? (gimple *) n->value : NULL);
14903 break;
14905 case GIMPLE_GOTO:
14907 tree lab = gimple_goto_dest (stmt);
14908 if (TREE_CODE (lab) != LABEL_DECL)
14909 break;
14911 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14912 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
14914 break;
14916 case GIMPLE_SWITCH:
14918 gswitch *switch_stmt = as_a <gswitch *> (stmt);
14919 unsigned int i;
14920 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
14922 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
14923 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14924 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
14925 break;
14928 break;
14930 case GIMPLE_RETURN:
14931 diagnose_sb_0 (gsi_p, context, NULL);
14932 break;
14934 default:
14935 break;
14938 return NULL_TREE;
14941 static unsigned int
14942 diagnose_omp_structured_block_errors (void)
14944 struct walk_stmt_info wi;
14945 gimple_seq body = gimple_body (current_function_decl);
14947 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
14949 memset (&wi, 0, sizeof (wi));
14950 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
14952 memset (&wi, 0, sizeof (wi));
14953 wi.want_locations = true;
14954 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
14956 gimple_set_body (current_function_decl, body);
14958 splay_tree_delete (all_labels);
14959 all_labels = NULL;
14961 return 0;
14964 namespace {
14966 const pass_data pass_data_diagnose_omp_blocks =
14968 GIMPLE_PASS, /* type */
14969 "*diagnose_omp_blocks", /* name */
14970 OPTGROUP_OMP, /* optinfo_flags */
14971 TV_NONE, /* tv_id */
14972 PROP_gimple_any, /* properties_required */
14973 0, /* properties_provided */
14974 0, /* properties_destroyed */
14975 0, /* todo_flags_start */
14976 0, /* todo_flags_finish */
14979 class pass_diagnose_omp_blocks : public gimple_opt_pass
14981 public:
14982 pass_diagnose_omp_blocks (gcc::context *ctxt)
14983 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
14986 /* opt_pass methods: */
14987 bool gate (function *) final override
14989 return flag_openacc || flag_openmp || flag_openmp_simd;
14991 unsigned int execute (function *) final override
14993 return diagnose_omp_structured_block_errors ();
14996 }; // class pass_diagnose_omp_blocks
14998 } // anon namespace
15000 gimple_opt_pass *
15001 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
15003 return new pass_diagnose_omp_blocks (ctxt);
15007 #include "gt-omp-low.h"