/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "stringpool.h"
#include "attribs.h"
#include "omp-offload.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
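
/* As a rough, illustrative sketch (not verbatim output of this file):
   a construct such as

       #pragma omp parallel shared(a)
	 body;

   ends up, over the omplower and ompexp passes, as an outlined child
   function plus a runtime call, along the lines of

       void body_fn (struct .omp_data_s *.omp_data_i) { ... }
       ...
       .omp_data_o.a = &a;
       __builtin_GOMP_parallel (body_fn, &.omp_data_o, 0, 0);

   This file builds the .omp_data_s marshalling records and rewrites
   variable references; the outlining itself happens in omp-expand.cc.  */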

/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.cc (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages
     regarding invalid gotos.  The outermost ctx is depth 1, with depth 0
     being reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is a nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is a nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is an order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is a bind clause on the construct (i.e. a loop
     construct).  */
  bool loop_p;

  /* Only used for omp target contexts.  True if a teams construct is
     strictly nested in it.  */
  bool teams_nested_p;

  /* Only used for omp target contexts.  True if an OpenMP construct other
     than teams is strictly nested in it.  */
  bool nonteams_nested_p;

  /* Candidates for adjusting OpenACC privatization level.  */
  vec<tree> oacc_privatization_candidates;
};
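
/* A sketch of how the record fields are used: for

       int a;  long b;
       ...
       #pragma omp parallel shared(a) firstprivate(b)

   where A's address is taken elsewhere, scan_sharing_clauses installs
   fields roughly like

       struct .omp_data_s { int *a; long b; };

   SENDER_DECL and RECEIVER_DECL are then the instances of this record
   on the sending and receiving sides, and FIELD_MAP maps each VAR_DECL
   to its FIELD_DECL.  (Illustrative only; the exact layout depends on
   use_pointer_for_field and install_var_field below.)  */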

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap make_addressable_vars;
static bitmap global_nonaddressable_vars;
static vec<omp_context *> taskreg_contexts;
static vec<gomp_task *> task_cpyfns;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
static bool omp_maybe_offloaded_ctx (omp_context *ctx);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_ASSUME: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

/* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_parallel_or_serial (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_SERIAL)));
}

/* Return whether CTX represents an OpenACC 'kernels' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* Return whether CTX represents an OpenACC 'kernels' decomposed part.  */

static bool
is_oacc_kernels_decomposed_part (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
}

/* Return true if STMT corresponds to an OpenMP target region.  */
static bool
is_omp_target (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
    {
      int kind = gimple_omp_target_kind (stmt);
      return (kind == GF_OMP_TARGET_KIND_REGION
	      || kind == GF_OMP_TARGET_KIND_DATA
	      || kind == GF_OMP_TARGET_KIND_ENTER_DATA
	      || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
    }
  return false;
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
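
/* For example (a sketch; the exact trees are front-end specific): for
   a C++ member function containing

       #pragma omp parallel private(n)

   where N is a non-static data member, the front end creates an
   artificial VAR_DECL whose DECL_VALUE_EXPR is this->n.  Given that
   dummy variable, the loop above walks down to the implicit THIS
   parameter and returns it.  */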

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
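
/* For instance, unshare_and_remap (a + b * c, b, d) yields a fresh,
   unshared copy of the whole expression with B replaced: a + d * c.  */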

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered; otherwise we assert that it was.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
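
/* Note on the key overloads above: most fields are keyed by the
   VAR_DECL pointer itself, but install_var_field below can key a field
   by &DECL_UID (var) or &DECL_NAME (var) instead, so that a single
   variable may own several distinct fields in the record.  */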

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!make_addressable_vars)
		make_addressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (make_addressable_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
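
/* An informal example of the rules above: given

       int x = 0;
       #pragma omp parallel shared(x)

   a non-addressable scalar X can use copy-in/copy-out, so we return
   false and X travels by value in the marshalling record.  If the
   program also contains "int *p = &x;", X is TREE_ADDRESSABLE and we
   must return true so the field holds &x and all threads see a single
   location.  */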

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;

  if (ctx)
    {
      DECL_CHAIN (copy) = ctx->block_vars;
      ctx->block_vars = copy;
    }
  else
    record_vars (copy);

  /* If VAR is listed in make_addressable_vars, it wasn't
     originally addressable, but was only later made so.
     We don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && ((make_addressable_vars
	   && bitmap_bit_p (make_addressable_vars, DECL_UID (var)))
	  || (global_nonaddressable_vars
	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  for (; outer; outer = outer->outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
	continue;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
	  && !maybe_lookup_decl (var, outer))
	continue;
      break;
    }

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || code == OMP_CLAUSE_ALLOCATE
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_privatize_by_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_privatize_by_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */
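
/* The MASK bits used with install_var_field (as inferred from the uses
   in this file; a reading aid, not a formal interface): bit 1 enters
   the field into FIELD_MAP/RECORD_TYPE, bit 2 into
   SFIELD_MAP/SRECORD_TYPE, bit 4 wraps the type in a double pointer for
   array sections, bit 8 keys the entry by &DECL_UID (VAR), bit 16 keys
   it by &DECL_NAME (VAR) and uses the Fortran array-descriptor data
   type, and bit 32 appears to mark fields backed by an allocate
   clause.  */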
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & (32 | 3)) == 1
	   && omp_privatize_by_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
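
/* In other words: labels are always remapped into the current function;
   a variable already mapped in some enclosing taskreg context reuses
   that mapping; globals and variables belonging to other functions are
   used as-is; anything else escaped the scan and is reported via
   error_mark_node.  */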

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;
  delete ctx->allocate_map;

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
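
/* Sketch of why the remapping above is needed: for a VLA such as

       int n = foo ();
       int a[n];
       #pragma omp parallel shared(a)

   the field for A has a variably modified type whose size refers to
   temporaries of the parent function; the loop above rebuilds the
   record with those sizes remapped so that the child function refers
   to its own copies.  */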

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
	&& (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    /* omp_default_mem_alloc is 1 */
	    || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
	    || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
      {
	/* The allocate clauses that appear on a target construct or on
	   constructs in a target region must specify an allocator expression
	   unless a requires directive with the dynamic_allocators clause
	   is present in the same compilation unit.  */
	if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS) == 0)
	    && omp_maybe_offloaded_ctx (ctx))
	  error_at (OMP_CLAUSE_LOCATION (c), "%<allocate%> clause must"
		    " specify an allocator here");
	if (ctx->allocate_map == NULL)
	  ctx->allocate_map = new hash_map<tree, tree>;
	tree val = integer_zero_node;
	if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
	  val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
	if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
	  val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
	ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
      }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map && ctx->allocate_map->get (decl))
	    ctx->allocate_map->remove (decl);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_privatize_by_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  /* Collect 'reduction' clauses on OpenACC compute construct.  */
	  if (is_gimple_omp_oacc (ctx->stmt)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No 'reduction' clauses on OpenACC 'kernels'.  */
	      gcc_checking_assert (!is_oacc_kernels (ctx));
	      /* Likewise, on OpenACC 'kernels' decomposed parts.  */
	      gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	      ctx->local_reduction_clauses
		= tree_cons (NULL, c, ctx->local_reduction_clauses);
	    }
	  /* FALLTHRU */

	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map
	      && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		   && (OMP_CLAUSE_REDUCTION_INSCAN (c)
		       || OMP_CLAUSE_REDUCTION_TASK (c)))
		  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		  || is_task_ctx (ctx)))
	    {
	      /* For now.  */
	      if (ctx->allocate_map->get (decl))
		ctx->allocate_map->remove (decl);
	    }
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (is_omp_target (ctx->stmt))
		{
		  if (is_variable_sized (t))
		    {
		      gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
		      t = DECL_VALUE_EXPR (t);
		      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
		      t = TREE_OPERAND (t, 0);
		      gcc_assert (DECL_P (t));
		    }
		  tree at = t;
		  if (ctx->outer)
		    scan_omp_op (&at, ctx->outer);
		  tree nt = omp_copy_decl_1 (at, ctx->outer);
		  splay_tree_insert (ctx->field_map,
				     (splay_tree_key) &DECL_CONTEXT (t),
				     (splay_tree_value) nt);
		  if (at != t)
		    splay_tree_insert (ctx->field_map,
				       (splay_tree_key) &DECL_CONTEXT (at),
				       (splay_tree_value) nt);
		  break;
		}
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_omp_target (ctx->stmt))
	    {
	      tree at = decl;
	      if (ctx->outer)
		scan_omp_op (&at, ctx->outer);
	      tree nt = omp_copy_decl_1 (at, ctx->outer);
	      splay_tree_insert (ctx->field_map,
				 (splay_tree_key) &DECL_CONTEXT (decl),
				 (splay_tree_value) nt);
	      if (at != decl)
		splay_tree_insert (ctx->field_map,
				   (splay_tree_key) &DECL_CONTEXT (at),
				   (splay_tree_value) nt);
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR
		      && lang_hooks.decls.omp_array_data (decl, true)))
		{
		  by_ref = !omp_privatize_by_reference (decl);
		  install_var_field (decl, by_ref, 3, ctx);
		}
	      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
		{
		  if (TREE_CODE (decl) == INDIRECT_REF)
		    decl = TREE_OPERAND (decl, 0);
		  install_var_field (decl, true, 3, ctx);
		}
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		{
		  if (ctx->allocate_map
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		    {
		      /* For now.  */
		      if (ctx->allocate_map->get (decl))
			ctx->allocate_map->remove (decl);
		    }
		  install_var_field (decl, false, 1, ctx);
		}
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_privatize_by_reference (decl)))
		{
		  if (ctx->allocate_map
		      && ctx->allocate_map->get (decl))
		    install_var_field (decl, by_ref, 32 | 1, ctx);
		  else
		    install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  /* For descr arrays on target: firstprivatize data + attach ptr.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	      && is_gimple_omp_offloaded (ctx->stmt)
	      && !is_gimple_omp_oacc (ctx->stmt)
	      && lang_hooks.decls.omp_array_data (decl, true))
	    {
	      install_var_field (decl, false, 16 | 3, ctx);
	      install_var_field (decl, true, 8 | 3, ctx);
	    }
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);

	  /* Fortran array descriptors.  */
	  if (lang_hooks.decls.omp_array_data (decl, true))
	    install_var_field (decl, false, 19, ctx);
	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		    && !omp_privatize_by_reference (decl)
		    && !omp_is_allocatable_or_ptr (decl))
		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);
	  while (TREE_CODE (decl) == INDIRECT_REF
		 || TREE_CODE (decl) == ARRAY_REF)
	    decl = TREE_OPERAND (decl, 0);
	  goto do_private;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_FILTER:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* If requested, make 'decl' addressable.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c))
	    {
	      gcc_checking_assert (DECL_P (decl));

	      bool decl_addressable = TREE_ADDRESSABLE (decl);
	      if (!decl_addressable)
		{
		  if (!make_addressable_vars)
		    make_addressable_vars = BITMAP_ALLOC (NULL);
		  bitmap_set_bit (make_addressable_vars, DECL_UID (decl));
		  TREE_ADDRESSABLE (decl) = 1;
		}

	      if (dump_enabled_p ())
		{
		  location_t loc = OMP_CLAUSE_LOCATION (c);
		  const dump_user_location_t d_u_loc
		    = dump_user_location_t::from_location_t (loc);
		  /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
#if __GNUC__ >= 10
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wformat"
#endif
		  if (!decl_addressable)
		    dump_printf_loc (MSG_NOTE, d_u_loc,
				     "variable %<%T%>"
				     " made addressable\n",
				     decl);
		  else
		    dump_printf_loc (MSG_NOTE, d_u_loc,
				     "variable %<%T%>"
				     " already made addressable\n",
				     decl);
#if __GNUC__ >= 10
# pragma GCC diagnostic pop
#endif
		}

	      /* Done.  */
	      OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c) = 0;
	    }
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt))
	    {
	      /* If this is an offloaded region, an attach operation should
		 only exist when the pointer variable is mapped in a prior
		 clause.
		 If we had an error, we may not have attempted to sort clauses
		 properly, so avoid the test.  */
	      if (is_gimple_omp_offloaded (ctx->stmt)
		  && !seen_error ())
		gcc_assert
		  (maybe_lookup_decl (decl, ctx)
		   || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
		       && lookup_attribute ("omp declare target",
					    DECL_ATTRIBUTES (decl))));

	      /* By itself, attach/detach is generated as part of pointer
		 variable mapping and should not create new variables in the
		 offloaded region, however sender refs for it must be created
		 for its address to be passed to the runtime.  */
	      tree field
		= build_decl (OMP_CLAUSE_LOCATION (c),
			      FIELD_DECL, NULL_TREE, ptr_type_node);
	      SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
	      insert_field_into_struct (ctx->record_type, field);
	      /* To not clash with a map of the pointer variable itself,
		 attach/detach maps have their field looked up by the *clause*
		 tree expression, not the decl.  */
	      gcc_assert (!splay_tree_lookup (ctx->field_map,
					      (splay_tree_key) c));
	      splay_tree_insert (ctx->field_map, (splay_tree_key) c,
				 (splay_tree_value) field);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			    == REFERENCE_TYPE)
			   || (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			       == POINTER_TYPE)))))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !(is_gimple_omp_oacc (ctx->stmt)
			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_BIND:
	  ctx->loop_p = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_ALLOCATE:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
	    {
	      while (TREE_CODE (decl) == INDIRECT_REF
		     || TREE_CODE (decl) == ARRAY_REF)
		decl = TREE_OPERAND (decl, 0);
	    }

	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
	       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt)
	      && !is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_ALLOCATE:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
1975 case OMP_CLAUSE_NOHOST:
1976 default:
1977 gcc_unreachable ();
1981 gcc_checking_assert (!scan_array_reductions
1982 || !is_gimple_omp_oacc (ctx->stmt));
1983 if (scan_array_reductions)
1985 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1986 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1987 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1988 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1989 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1991 omp_context *rctx = ctx;
1992 if (is_omp_target (ctx->stmt))
1993 rctx = ctx->outer;
1994 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
1995 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
1997 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1998 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1999 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
2000 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
2001 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
2002 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
2006 /* Create a new name for the omp child function. Returns an identifier. */
2008 static tree
2009 create_omp_child_function_name (bool task_copy)
2011 return clone_function_name_numbered (current_function_decl,
2012 task_copy ? "_omp_cpyfn" : "_omp_fn");
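/* Illustration (not part of the original source): for a function 'foo',
   clone_function_name_numbered typically produces identifiers such as
   "foo._omp_fn.0" or "foo._omp_cpyfn.1"; the trailing number is a
   per-function clone counter.  */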
2015 /* Return true if CTX may belong to offloaded code: either if the current
2016 function is offloaded, or if any enclosing context corresponds to a target region. */
2018 static bool
2019 omp_maybe_offloaded_ctx (omp_context *ctx)
2021 if (cgraph_node::get (current_function_decl)->offloadable)
2022 return true;
2023 for (; ctx; ctx = ctx->outer)
2024 if (is_gimple_omp_offloaded (ctx->stmt))
2025 return true;
2026 return false;
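/* Illustration (assumption, not original source): any context lexically
   inside a '#pragma omp target' body satisfies this, as does every
   context in a function that is itself marked offloadable, e.g. one
   enclosed in '#pragma omp declare target'.  */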
2029 /* Build a decl for the omp child function. It will not contain a body
2030 yet, just the bare decl. */
2032 static void
2033 create_omp_child_function (omp_context *ctx, bool task_copy)
2035 tree decl, type, name, t;
2037 name = create_omp_child_function_name (task_copy);
2038 if (task_copy)
2039 type = build_function_type_list (void_type_node, ptr_type_node,
2040 ptr_type_node, NULL_TREE);
2041 else
2042 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2044 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
2046 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
2047 || !task_copy);
2048 if (!task_copy)
2049 ctx->cb.dst_fn = decl;
2050 else
2051 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
2053 TREE_STATIC (decl) = 1;
2054 TREE_USED (decl) = 1;
2055 DECL_ARTIFICIAL (decl) = 1;
2056 DECL_IGNORED_P (decl) = 0;
2057 TREE_PUBLIC (decl) = 0;
2058 DECL_UNINLINABLE (decl) = 1;
2059 DECL_EXTERNAL (decl) = 0;
2060 DECL_CONTEXT (decl) = NULL_TREE;
2061 DECL_INITIAL (decl) = make_node (BLOCK);
2062 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
2063 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
2064 /* Remove omp declare simd attribute from the new attributes. */
2065 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
2067 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
2068 a = a2;
2069 a = TREE_CHAIN (a);
2070 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
2071 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
2072 *p = TREE_CHAIN (*p);
2073 else
2075 tree chain = TREE_CHAIN (*p);
2076 *p = copy_node (*p);
2077 p = &TREE_CHAIN (*p);
2078 *p = chain;
2081 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
2082 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
2083 DECL_FUNCTION_SPECIFIC_TARGET (decl)
2084 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
2085 DECL_FUNCTION_VERSIONED (decl)
2086 = DECL_FUNCTION_VERSIONED (current_function_decl);
2088 if (omp_maybe_offloaded_ctx (ctx))
2090 cgraph_node::get_create (decl)->offloadable = 1;
2091 if (ENABLE_OFFLOADING)
2092 g->have_offload = true;
2095 if (cgraph_node::get_create (decl)->offloadable)
2097 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
2098 ? "omp target entrypoint"
2099 : "omp declare target");
2100 if (lookup_attribute ("omp declare target",
2101 DECL_ATTRIBUTES (current_function_decl)))
2103 if (is_gimple_omp_offloaded (ctx->stmt))
2104 DECL_ATTRIBUTES (decl)
2105 = remove_attribute ("omp declare target",
2106 copy_list (DECL_ATTRIBUTES (decl)));
2107 else
2108 target_attr = NULL;
2110 if (target_attr
2111 && is_gimple_omp_offloaded (ctx->stmt)
2112 && lookup_attribute ("noclone", DECL_ATTRIBUTES (decl)) == NULL_TREE)
2113 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("noclone"),
2114 NULL_TREE, DECL_ATTRIBUTES (decl));
2115 if (target_attr)
2116 DECL_ATTRIBUTES (decl)
2117 = tree_cons (get_identifier (target_attr),
2118 NULL_TREE, DECL_ATTRIBUTES (decl));
2121 t = build_decl (DECL_SOURCE_LOCATION (decl),
2122 RESULT_DECL, NULL_TREE, void_type_node);
2123 DECL_ARTIFICIAL (t) = 1;
2124 DECL_IGNORED_P (t) = 1;
2125 DECL_CONTEXT (t) = decl;
2126 DECL_RESULT (decl) = t;
2128 tree data_name = get_identifier (".omp_data_i");
2129 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
2130 ptr_type_node);
2131 DECL_ARTIFICIAL (t) = 1;
2132 DECL_NAMELESS (t) = 1;
2133 DECL_ARG_TYPE (t) = ptr_type_node;
2134 DECL_CONTEXT (t) = current_function_decl;
2135 TREE_USED (t) = 1;
2136 TREE_READONLY (t) = 1;
2137 DECL_ARGUMENTS (decl) = t;
2138 if (!task_copy)
2139 ctx->receiver_decl = t;
2140 else
2142 t = build_decl (DECL_SOURCE_LOCATION (decl),
2143 PARM_DECL, get_identifier (".omp_data_o"),
2144 ptr_type_node);
2145 DECL_ARTIFICIAL (t) = 1;
2146 DECL_NAMELESS (t) = 1;
2147 DECL_ARG_TYPE (t) = ptr_type_node;
2148 DECL_CONTEXT (t) = current_function_decl;
2149 TREE_USED (t) = 1;
2150 TREE_ADDRESSABLE (t) = 1;
2151 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2152 DECL_ARGUMENTS (decl) = t;
2155 /* Allocate memory for the function structure. The call to
2156 allocate_struct_function clobbers CFUN, so we need to restore
2157 it afterward. */
2158 push_struct_function (decl);
2159 cfun->function_end_locus = gimple_location (ctx->stmt);
2160 init_tree_ssa (cfun);
2161 pop_cfun ();
2164 /* Callback for walk_gimple_seq. Check whether a combined parallel
2165 contains an OMP_FOR for which gimple_omp_for_combined_into_p is true. */
2167 tree
2168 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2169 bool *handled_ops_p,
2170 struct walk_stmt_info *wi)
2172 gimple *stmt = gsi_stmt (*gsi_p);
2174 *handled_ops_p = true;
2175 switch (gimple_code (stmt))
2177 WALK_SUBSTMTS;
2179 case GIMPLE_OMP_FOR:
2180 if (gimple_omp_for_combined_into_p (stmt)
2181 && gimple_omp_for_kind (stmt)
2182 == *(const enum gf_mask *) (wi->info))
2184 wi->info = stmt;
2185 return integer_zero_node;
2187 break;
2188 default:
2189 break;
2191 return NULL;
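/* Example (hypothetical user code, for illustration only):

     #pragma omp parallel for
     for (int i = 0; i < n; i++)
       a[i] = i;

   Here the loop is gimplified into a GIMPLE_OMP_FOR that is marked
   combined-into its enclosing parallel, so this walker finds it and
   hands it back through WI->info.  */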
2194 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses to an OpenMP parallel or task. */
2196 static void
2197 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2198 omp_context *outer_ctx)
2200 struct walk_stmt_info wi;
2202 memset (&wi, 0, sizeof (wi));
2203 wi.val_only = true;
2204 wi.info = (void *) &msk;
2205 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2206 if (wi.info != (void *) &msk)
2208 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2209 struct omp_for_data fd;
2210 omp_extract_for_data (for_stmt, &fd, NULL);
2211 /* We need two temporaries with fd.loop.v type (istart/iend)
2212 and then (fd.collapse - 1) temporaries with the same
2213 type for count2 ... countN-1 vars if not constant. */
2214 size_t count = 2, i;
2215 tree type = fd.iter_type;
2216 if (fd.collapse > 1
2217 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2219 count += fd.collapse - 1;
2220 /* If there are lastprivate clauses on the inner
2221 GIMPLE_OMP_FOR, add one more temporary for the total number
2222 of iterations (the product of count1 ... countN-1). */
2223 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2224 OMP_CLAUSE_LASTPRIVATE)
2225 || (msk == GF_OMP_FOR_KIND_FOR
2226 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2227 OMP_CLAUSE_LASTPRIVATE)))
2229 tree temp = create_tmp_var (type);
2230 tree c = build_omp_clause (UNKNOWN_LOCATION,
2231 OMP_CLAUSE__LOOPTEMP_);
2232 insert_decl_map (&outer_ctx->cb, temp, temp);
2233 OMP_CLAUSE_DECL (c) = temp;
2234 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2235 gimple_omp_taskreg_set_clauses (stmt, c);
2237 if (fd.non_rect
2238 && fd.last_nonrect == fd.first_nonrect + 1)
2239 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2240 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2242 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2243 tree type2 = TREE_TYPE (v);
2244 count++;
2245 for (i = 0; i < 3; i++)
2247 tree temp = create_tmp_var (type2);
2248 tree c = build_omp_clause (UNKNOWN_LOCATION,
2249 OMP_CLAUSE__LOOPTEMP_);
2250 insert_decl_map (&outer_ctx->cb, temp, temp);
2251 OMP_CLAUSE_DECL (c) = temp;
2252 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2253 gimple_omp_taskreg_set_clauses (stmt, c);
2257 for (i = 0; i < count; i++)
2259 tree temp = create_tmp_var (type);
2260 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2261 insert_decl_map (&outer_ctx->cb, temp, temp);
2262 OMP_CLAUSE_DECL (c) = temp;
2263 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2264 gimple_omp_taskreg_set_clauses (stmt, c);
2267 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2268 && omp_find_clause (gimple_omp_task_clauses (stmt),
2269 OMP_CLAUSE_REDUCTION))
2271 tree type = build_pointer_type (pointer_sized_int_node);
2272 tree temp = create_tmp_var (type);
2273 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2274 insert_decl_map (&outer_ctx->cb, temp, temp);
2275 OMP_CLAUSE_DECL (c) = temp;
2276 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2277 gimple_omp_task_set_clauses (stmt, c);
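/* Sketch of the effect (illustration only): for a combined construct
   such as

     #pragma omp parallel for lastprivate(x) collapse(2)

   with loop bounds that are not compile-time constants, the parallel
   statement gains _LOOPTEMP_ clauses for istart/iend, the collapsed
   iteration counts, and the total iteration count, so they can be
   passed to the child function; a taskloop with a reduction clause
   additionally gains a _REDUCTEMP_ clause.  */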
2281 /* Scan an OpenMP parallel directive. */
2283 static void
2284 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2286 omp_context *ctx;
2287 tree name;
2288 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2290 /* Ignore parallel directives with empty bodies, unless there
2291 are copyin clauses. */
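/* E.g. (illustration only): '#pragma omp parallel' with an empty body
   is simply replaced by a nop when optimizing, since spawning threads
   for it would have no observable effect; a copyin clause prevents
   this because copying threadprivate data is itself a side effect.  */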
2292 if (optimize > 0
2293 && empty_body_p (gimple_omp_body (stmt))
2294 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2295 OMP_CLAUSE_COPYIN) == NULL)
2297 gsi_replace (gsi, gimple_build_nop (), false);
2298 return;
2301 if (gimple_omp_parallel_combined_p (stmt))
2302 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
2303 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2304 OMP_CLAUSE_REDUCTION);
2305 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2306 if (OMP_CLAUSE_REDUCTION_TASK (c))
2308 tree type = build_pointer_type (pointer_sized_int_node);
2309 tree temp = create_tmp_var (type);
2310 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2311 if (outer_ctx)
2312 insert_decl_map (&outer_ctx->cb, temp, temp);
2313 OMP_CLAUSE_DECL (c) = temp;
2314 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2315 gimple_omp_parallel_set_clauses (stmt, c);
2316 break;
2318 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2319 break;
2321 ctx = new_omp_context (stmt, outer_ctx);
2322 taskreg_contexts.safe_push (ctx);
2323 if (taskreg_nesting_level > 1)
2324 ctx->is_nested = true;
2325 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2326 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2327 name = create_tmp_var_name (".omp_data_s");
2328 name = build_decl (gimple_location (stmt),
2329 TYPE_DECL, name, ctx->record_type);
2330 DECL_ARTIFICIAL (name) = 1;
2331 DECL_NAMELESS (name) = 1;
2332 TYPE_NAME (ctx->record_type) = name;
2333 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2334 create_omp_child_function (ctx, false);
2335 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2337 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2338 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2340 if (TYPE_FIELDS (ctx->record_type) == NULL)
2341 ctx->record_type = ctx->receiver_decl = NULL;
2344 /* Scan an OpenMP task directive. */
2346 static void
2347 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2349 omp_context *ctx;
2350 tree name, t;
2351 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2353 /* Ignore task directives with empty bodies, unless they have a depend
2354 clause. */
2355 if (optimize > 0
2356 && gimple_omp_body (stmt)
2357 && empty_body_p (gimple_omp_body (stmt))
2358 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2360 gsi_replace (gsi, gimple_build_nop (), false);
2361 return;
2364 if (gimple_omp_task_taskloop_p (stmt))
2365 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2367 ctx = new_omp_context (stmt, outer_ctx);
2369 if (gimple_omp_task_taskwait_p (stmt))
2371 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2372 return;
2375 taskreg_contexts.safe_push (ctx);
2376 if (taskreg_nesting_level > 1)
2377 ctx->is_nested = true;
2378 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2379 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2380 name = create_tmp_var_name (".omp_data_s");
2381 name = build_decl (gimple_location (stmt),
2382 TYPE_DECL, name, ctx->record_type);
2383 DECL_ARTIFICIAL (name) = 1;
2384 DECL_NAMELESS (name) = 1;
2385 TYPE_NAME (ctx->record_type) = name;
2386 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2387 create_omp_child_function (ctx, false);
2388 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2390 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2392 if (ctx->srecord_type)
2394 name = create_tmp_var_name (".omp_data_a");
2395 name = build_decl (gimple_location (stmt),
2396 TYPE_DECL, name, ctx->srecord_type);
2397 DECL_ARTIFICIAL (name) = 1;
2398 DECL_NAMELESS (name) = 1;
2399 TYPE_NAME (ctx->srecord_type) = name;
2400 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2401 create_omp_child_function (ctx, true);
2404 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2406 if (TYPE_FIELDS (ctx->record_type) == NULL)
2408 ctx->record_type = ctx->receiver_decl = NULL;
2409 t = build_int_cst (long_integer_type_node, 0);
2410 gimple_omp_task_set_arg_size (stmt, t);
2411 t = build_int_cst (long_integer_type_node, 1);
2412 gimple_omp_task_set_arg_align (stmt, t);
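/* Illustration (not part of the original source): a task that captures
   no variables ends up with an empty record type, so the runtime is
   told to allocate a zero-sized, byte-aligned argument block
   (arg_size 0, arg_align 1).  */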
2416 /* Helper function for finish_taskreg_scan, called through walk_tree.
2417 If maybe_lookup_decl_in_outer_ctx returns a different tree for some
2418 variable, replace it in the expression. */
2420 static tree
2421 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2423 if (VAR_P (*tp))
2425 omp_context *ctx = (omp_context *) data;
2426 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2427 if (t != *tp)
2429 if (DECL_HAS_VALUE_EXPR_P (t))
2430 t = unshare_expr (DECL_VALUE_EXPR (t));
2431 *tp = t;
2433 *walk_subtrees = 0;
2435 else if (IS_TYPE_OR_DECL_P (*tp))
2436 *walk_subtrees = 0;
2437 return NULL_TREE;
2440 /* If any decls have been made addressable during scan_omp,
2441 adjust their fields if needed, and lay out the record types
2442 of parallel/task constructs. */
2444 static void
2445 finish_taskreg_scan (omp_context *ctx)
2447 if (ctx->record_type == NULL_TREE)
2448 return;
2450 /* If any make_addressable_vars were needed, check all
2451 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2452 statements to see whether use_pointer_for_field has changed
2453 because of that. If it did, update the field types now. */
2454 if (make_addressable_vars)
2456 tree c;
2458 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2459 c; c = OMP_CLAUSE_CHAIN (c))
2460 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2461 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2463 tree decl = OMP_CLAUSE_DECL (c);
2465 /* Global variables don't need to be copied,
2466 the receiver side will use them directly. */
2467 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2468 continue;
2469 if (!bitmap_bit_p (make_addressable_vars, DECL_UID (decl))
2470 || !use_pointer_for_field (decl, ctx))
2471 continue;
2472 tree field = lookup_field (decl, ctx);
2473 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2474 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2475 continue;
2476 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2477 TREE_THIS_VOLATILE (field) = 0;
2478 DECL_USER_ALIGN (field) = 0;
2479 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2480 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2481 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2482 if (ctx->srecord_type)
2484 tree sfield = lookup_sfield (decl, ctx);
2485 TREE_TYPE (sfield) = TREE_TYPE (field);
2486 TREE_THIS_VOLATILE (sfield) = 0;
2487 DECL_USER_ALIGN (sfield) = 0;
2488 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2489 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2490 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2495 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2497 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2498 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2499 if (c)
2501 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2502 expects to find it at the start of data. */
2503 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2504 tree *p = &TYPE_FIELDS (ctx->record_type);
2505 while (*p)
2506 if (*p == f)
2508 *p = DECL_CHAIN (*p);
2509 break;
2511 else
2512 p = &DECL_CHAIN (*p);
2513 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2514 TYPE_FIELDS (ctx->record_type) = f;
2516 layout_type (ctx->record_type);
2517 fixup_child_record_type (ctx);
2519 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2521 layout_type (ctx->record_type);
2522 fixup_child_record_type (ctx);
2524 else
2526 location_t loc = gimple_location (ctx->stmt);
2527 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2528 tree detach_clause
2529 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2530 OMP_CLAUSE_DETACH);
2531 /* Move VLA fields to the end. */
2532 p = &TYPE_FIELDS (ctx->record_type);
2533 while (*p)
2534 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2535 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2537 *q = *p;
2538 *p = TREE_CHAIN (*p);
2539 TREE_CHAIN (*q) = NULL_TREE;
2540 q = &TREE_CHAIN (*q);
2542 else
2543 p = &DECL_CHAIN (*p);
2544 *p = vla_fields;
2545 if (gimple_omp_task_taskloop_p (ctx->stmt))
2547 /* Move the fields corresponding to the first and second _looptemp_
2548 clauses first. These are filled by GOMP_taskloop
2549 and thus need to be in specific positions. */
2550 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2551 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2552 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2553 OMP_CLAUSE__LOOPTEMP_);
2554 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2555 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2556 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2557 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2558 p = &TYPE_FIELDS (ctx->record_type);
2559 while (*p)
2560 if (*p == f1 || *p == f2 || *p == f3)
2561 *p = DECL_CHAIN (*p);
2562 else
2563 p = &DECL_CHAIN (*p);
2564 DECL_CHAIN (f1) = f2;
2565 if (c3)
2567 DECL_CHAIN (f2) = f3;
2568 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2570 else
2571 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2572 TYPE_FIELDS (ctx->record_type) = f1;
2573 if (ctx->srecord_type)
2575 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2576 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2577 if (c3)
2578 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2579 p = &TYPE_FIELDS (ctx->srecord_type);
2580 while (*p)
2581 if (*p == f1 || *p == f2 || *p == f3)
2582 *p = DECL_CHAIN (*p);
2583 else
2584 p = &DECL_CHAIN (*p);
2585 DECL_CHAIN (f1) = f2;
2586 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2587 if (c3)
2589 DECL_CHAIN (f2) = f3;
2590 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2592 else
2593 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2594 TYPE_FIELDS (ctx->srecord_type) = f1;
2597 if (detach_clause)
2599 tree c, field;
2601 /* Look for a firstprivate clause with the detach event handle. */
2602 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2603 c; c = OMP_CLAUSE_CHAIN (c))
2605 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2606 continue;
2607 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2608 == OMP_CLAUSE_DECL (detach_clause))
2609 break;
2612 gcc_assert (c);
2613 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2615 /* Move field corresponding to the detach clause first.
2616 This is filled by GOMP_task and needs to be in a
2617 specific position. */
2618 p = &TYPE_FIELDS (ctx->record_type);
2619 while (*p)
2620 if (*p == field)
2621 *p = DECL_CHAIN (*p);
2622 else
2623 p = &DECL_CHAIN (*p);
2624 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2625 TYPE_FIELDS (ctx->record_type) = field;
2626 if (ctx->srecord_type)
2628 field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2629 p = &TYPE_FIELDS (ctx->srecord_type);
2630 while (*p)
2631 if (*p == field)
2632 *p = DECL_CHAIN (*p);
2633 else
2634 p = &DECL_CHAIN (*p);
2635 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2636 TYPE_FIELDS (ctx->srecord_type) = field;
2639 layout_type (ctx->record_type);
2640 fixup_child_record_type (ctx);
2641 if (ctx->srecord_type)
2642 layout_type (ctx->srecord_type);
2643 tree t = fold_convert_loc (loc, long_integer_type_node,
2644 TYPE_SIZE_UNIT (ctx->record_type));
2645 if (TREE_CODE (t) != INTEGER_CST)
2647 t = unshare_expr (t);
2648 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2650 gimple_omp_task_set_arg_size (ctx->stmt, t);
2651 t = build_int_cst (long_integer_type_node,
2652 TYPE_ALIGN_UNIT (ctx->record_type));
2653 gimple_omp_task_set_arg_align (ctx->stmt, t);
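/* Resulting field order, sketched for illustration (assumes a taskloop
   with a reduction and a variable-sized firstprivate):

     struct .omp_data_s {
       <_looptemp_ 1>;    // filled in by GOMP_taskloop
       <_looptemp_ 2>;    // filled in by GOMP_taskloop
       <_reductemp_>;     // only with task reductions
       ... fixed-size fields ...
       ... variable-sized (VLA) fields last ...
     };  */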
2657 /* Find the enclosing offload context. */
2659 static omp_context *
2660 enclosing_target_ctx (omp_context *ctx)
2662 for (; ctx; ctx = ctx->outer)
2663 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2664 break;
2666 return ctx;
2669 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2670 construct.
2671 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2673 static bool
2674 ctx_in_oacc_kernels_region (omp_context *ctx)
2676 for (; ctx != NULL; ctx = ctx->outer)
2678 gimple *stmt = ctx->stmt;
2679 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2680 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2681 return true;
2684 return false;
2687 /* Check the parallelism clauses inside an OpenACC 'kernels' region.
2688 (This doesn't include OpenACC 'kernels' decomposed parts.)
2689 Until kernels handling moves to use the same loop indirection
2690 scheme as parallel, we need to do this checking early. */
2692 static unsigned
2693 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2695 bool checking = true;
2696 unsigned outer_mask = 0;
2697 unsigned this_mask = 0;
2698 bool has_seq = false, has_auto = false;
2700 if (ctx->outer)
2701 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2702 if (!stmt)
2704 checking = false;
2705 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2706 return outer_mask;
2707 stmt = as_a <gomp_for *> (ctx->stmt);
2710 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2712 switch (OMP_CLAUSE_CODE (c))
2714 case OMP_CLAUSE_GANG:
2715 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2716 break;
2717 case OMP_CLAUSE_WORKER:
2718 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2719 break;
2720 case OMP_CLAUSE_VECTOR:
2721 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2722 break;
2723 case OMP_CLAUSE_SEQ:
2724 has_seq = true;
2725 break;
2726 case OMP_CLAUSE_AUTO:
2727 has_auto = true;
2728 break;
2729 default:
2730 break;
2734 if (checking)
2736 if (has_seq && (this_mask || has_auto))
2737 error_at (gimple_location (stmt), "%<seq%> overrides other"
2738 " OpenACC loop specifiers");
2739 else if (has_auto && this_mask)
2740 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2741 " OpenACC loop specifiers");
2743 if (this_mask & outer_mask)
2744 error_at (gimple_location (stmt), "inner loop uses same"
2745 " OpenACC parallelism as containing loop");
2748 return outer_mask | this_mask;
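/* Example of a diagnosed case (hypothetical user code):

     #pragma acc loop gang
     for (...)
       {
         #pragma acc loop gang      // error: inner loop uses same
         for (...) ...              // OpenACC parallelism as containing loop
       }
*/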
2751 /* Scan a GIMPLE_OMP_FOR. */
2753 static omp_context *
2754 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2756 omp_context *ctx;
2757 size_t i;
2758 tree clauses = gimple_omp_for_clauses (stmt);
2760 ctx = new_omp_context (stmt, outer_ctx);
2762 if (is_gimple_omp_oacc (stmt))
2764 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2766 if (!(tgt && is_oacc_kernels (tgt)))
2767 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2769 tree c_op0;
2770 switch (OMP_CLAUSE_CODE (c))
2772 case OMP_CLAUSE_GANG:
2773 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2774 break;
2776 case OMP_CLAUSE_WORKER:
2777 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2778 break;
2780 case OMP_CLAUSE_VECTOR:
2781 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2782 break;
2784 default:
2785 continue;
2788 if (c_op0)
2790 /* By construction, this is impossible for OpenACC 'kernels'
2791 decomposed parts. */
2792 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2794 error_at (OMP_CLAUSE_LOCATION (c),
2795 "argument not permitted on %qs clause",
2796 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2797 if (tgt)
2798 inform (gimple_location (tgt->stmt),
2799 "enclosing parent compute construct");
2800 else if (oacc_get_fn_attrib (current_function_decl))
2801 inform (DECL_SOURCE_LOCATION (current_function_decl),
2802 "enclosing routine");
2803 else
2804 gcc_unreachable ();
2808 if (tgt && is_oacc_kernels (tgt))
2809 check_oacc_kernel_gwv (stmt, ctx);
2811 /* Collect all variables named in reductions on this loop. Ensure
2812 that, if this loop has a reduction on some variable v, and there is
2813 a reduction on v somewhere in an outer context, then there is a
2814 reduction on v on all intervening loops as well. */
2815 tree local_reduction_clauses = NULL;
2816 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2818 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2819 local_reduction_clauses
2820 = tree_cons (NULL, c, local_reduction_clauses);
2822 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2823 ctx->outer_reduction_clauses
2824 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2825 ctx->outer->outer_reduction_clauses);
2826 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2827 tree local_iter = local_reduction_clauses;
2828 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2830 tree local_clause = TREE_VALUE (local_iter);
2831 tree local_var = OMP_CLAUSE_DECL (local_clause);
2832 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2833 bool have_outer_reduction = false;
2834 tree ctx_iter = outer_reduction_clauses;
2835 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2837 tree outer_clause = TREE_VALUE (ctx_iter);
2838 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2839 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2840 if (outer_var == local_var && outer_op != local_op)
2842 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2843 "conflicting reduction operations for %qE",
2844 local_var);
2845 inform (OMP_CLAUSE_LOCATION (outer_clause),
2846 "location of the previous reduction for %qE",
2847 outer_var);
2849 if (outer_var == local_var)
2851 have_outer_reduction = true;
2852 break;
2855 if (have_outer_reduction)
2857 /* There is a reduction on outer_var both on this loop and on
2858 some enclosing loop. Walk up the context tree until such a
2859 loop with a reduction on outer_var is found, and complain
2860 about all intervening loops that do not have such a
2861 reduction. */
2862 struct omp_context *curr_loop = ctx->outer;
2863 bool found = false;
2864 while (curr_loop != NULL)
2866 tree curr_iter = curr_loop->local_reduction_clauses;
2867 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2869 tree curr_clause = TREE_VALUE (curr_iter);
2870 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2871 if (curr_var == local_var)
2873 found = true;
2874 break;
2877 if (!found)
2878 warning_at (gimple_location (curr_loop->stmt), 0,
2879 "nested loop in reduction needs "
2880 "reduction clause for %qE",
2881 local_var);
2882 else
2883 break;
2884 curr_loop = curr_loop->outer;
2888 ctx->local_reduction_clauses = local_reduction_clauses;
2889 ctx->outer_reduction_clauses
2890 = chainon (unshare_expr (ctx->local_reduction_clauses),
2891 ctx->outer_reduction_clauses);
2893 if (tgt && is_oacc_kernels (tgt))
2895 /* Strip out reductions, as they are not handled yet. */
2896 tree *prev_ptr = &clauses;
2898 while (tree probe = *prev_ptr)
2900 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2902 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2903 *prev_ptr = *next_ptr;
2904 else
2905 prev_ptr = next_ptr;
2908 gimple_omp_for_set_clauses (stmt, clauses);
2912 scan_sharing_clauses (clauses, ctx);
2914 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2915 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2917 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2918 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2919 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2920 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2922 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2923 return ctx;
2926 /* Duplicate #pragma omp simd: one copy for SIMT, another for SIMD. */
2928 static void
2929 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2930 omp_context *outer_ctx)
2932 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2933 gsi_replace (gsi, bind, false);
2934 gimple_seq seq = NULL;
2935 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2936 tree cond = create_tmp_var_raw (integer_type_node);
2937 DECL_CONTEXT (cond) = current_function_decl;
2938 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2939 gimple_bind_set_vars (bind, cond);
2940 gimple_call_set_lhs (g, cond);
2941 gimple_seq_add_stmt (&seq, g);
2942 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2943 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2944 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2945 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2946 gimple_seq_add_stmt (&seq, g);
2947 g = gimple_build_label (lab1);
2948 gimple_seq_add_stmt (&seq, g);
2949 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2950 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2951 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2952 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2953 gimple_omp_for_set_clauses (new_stmt, clause);
2954 gimple_seq_add_stmt (&seq, new_stmt);
2955 g = gimple_build_goto (lab3);
2956 gimple_seq_add_stmt (&seq, g);
2957 g = gimple_build_label (lab2);
2958 gimple_seq_add_stmt (&seq, g);
2959 gimple_seq_add_stmt (&seq, stmt);
2960 g = gimple_build_label (lab3);
2961 gimple_seq_add_stmt (&seq, g);
2962 gimple_bind_set_body (bind, seq);
2963 update_stmt (bind);
2964 scan_omp_for (new_stmt, outer_ctx);
2965 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
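/* The emitted dispatch is roughly (sketch, not literal GIMPLE):

     cond = GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop carrying a _simt_ clause>; goto lab3;
     lab2: <original simd loop>;
     lab3: ;
*/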
2968 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2969 struct walk_stmt_info *);
2970 static omp_context *maybe_lookup_ctx (gimple *);
2972 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2973 for the scan phase loop. */
2975 static void
2976 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2977 omp_context *outer_ctx)
2979 /* The only change between inclusive and exclusive scan will be
2980 within the first simd loop, so just use inclusive in the
2981 worksharing loop. */
2982 outer_ctx->scan_inclusive = true;
2983 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2984 OMP_CLAUSE_DECL (c) = integer_zero_node;
2986 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2987 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2988 gsi_replace (gsi, input_stmt, false);
2989 gimple_seq input_body = NULL;
2990 gimple_seq_add_stmt (&input_body, stmt);
2991 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2993 gimple_stmt_iterator input1_gsi = gsi_none ();
2994 struct walk_stmt_info wi;
2995 memset (&wi, 0, sizeof (wi));
2996 wi.val_only = true;
2997 wi.info = (void *) &input1_gsi;
2998 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2999 gcc_assert (!gsi_end_p (input1_gsi));
3001 gimple *input_stmt1 = gsi_stmt (input1_gsi);
3002 gsi_next (&input1_gsi);
3003 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
3004 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
3005 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
3006 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
3007 std::swap (input_stmt1, scan_stmt1);
3009 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
3010 gimple_omp_set_body (input_stmt1, NULL);
3012 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
3013 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
3015 gimple_omp_set_body (input_stmt1, input_body1);
3016 gimple_omp_set_body (scan_stmt1, NULL);
3018 gimple_stmt_iterator input2_gsi = gsi_none ();
3019 memset (&wi, 0, sizeof (wi));
3020 wi.val_only = true;
3021 wi.info = (void *) &input2_gsi;
3022 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
3023 NULL, &wi);
3024 gcc_assert (!gsi_end_p (input2_gsi));
3026 gimple *input_stmt2 = gsi_stmt (input2_gsi);
3027 gsi_next (&input2_gsi);
3028 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
3029 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
3030 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
3031 std::swap (input_stmt2, scan_stmt2);
3033 gimple_omp_set_body (input_stmt2, NULL);
3035 gimple_omp_set_body (input_stmt, input_body);
3036 gimple_omp_set_body (scan_stmt, scan_body);
3038 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
3039 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
3041 ctx = new_omp_context (scan_stmt, outer_ctx);
3042 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
3044 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
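/* Resulting shape (sketch): the original loop becomes the body of an
   input-phase GIMPLE_OMP_SCAN, followed by a second GIMPLE_OMP_SCAN
   carrying an inclusive clause whose body is a copy of the loop that
   performs the scan phase proper.  */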
3047 /* Scan an OpenMP sections directive. */
3049 static void
3050 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
3052 omp_context *ctx;
3054 ctx = new_omp_context (stmt, outer_ctx);
3055 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
3056 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3059 /* Scan an OpenMP single directive. */
3061 static void
3062 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
3064 omp_context *ctx;
3065 tree name;
3067 ctx = new_omp_context (stmt, outer_ctx);
3068 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3069 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3070 name = create_tmp_var_name (".omp_copy_s");
3071 name = build_decl (gimple_location (stmt),
3072 TYPE_DECL, name, ctx->record_type);
3073 TYPE_NAME (ctx->record_type) = name;
3075 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
3076 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3078 if (TYPE_FIELDS (ctx->record_type) == NULL)
3079 ctx->record_type = NULL;
3080 else
3081 layout_type (ctx->record_type);
3084 /* Scan a GIMPLE_OMP_TARGET. */
3086 static void
3087 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
3089 omp_context *ctx;
3090 tree name;
3091 bool offloaded = is_gimple_omp_offloaded (stmt);
3092 tree clauses = gimple_omp_target_clauses (stmt);
3094 ctx = new_omp_context (stmt, outer_ctx);
3095 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3096 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3097 name = create_tmp_var_name (".omp_data_t");
3098 name = build_decl (gimple_location (stmt),
3099 TYPE_DECL, name, ctx->record_type);
3100 DECL_ARTIFICIAL (name) = 1;
3101 DECL_NAMELESS (name) = 1;
3102 TYPE_NAME (ctx->record_type) = name;
3103 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3105 if (offloaded)
3107 create_omp_child_function (ctx, false);
3108 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
3111 scan_sharing_clauses (clauses, ctx);
3112 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3114 if (TYPE_FIELDS (ctx->record_type) == NULL)
3115 ctx->record_type = ctx->receiver_decl = NULL;
3116 else
3118 TYPE_FIELDS (ctx->record_type)
3119 = nreverse (TYPE_FIELDS (ctx->record_type));
3120 if (flag_checking)
3122 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
3123 for (tree field = TYPE_FIELDS (ctx->record_type);
3124 field;
3125 field = DECL_CHAIN (field))
3126 gcc_assert (DECL_ALIGN (field) == align);
3128 layout_type (ctx->record_type);
3129 if (offloaded)
3130 fixup_child_record_type (ctx);
3133 if (ctx->teams_nested_p && ctx->nonteams_nested_p)
3135 error_at (gimple_location (stmt),
3136 "%<target%> construct with nested %<teams%> construct "
3137 "contains directives outside of the %<teams%> construct");
3138 gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
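/* Example of the rejected mixture (hypothetical user code):

     #pragma omp target
     {
       #pragma omp teams
       ...;
       #pragma omp parallel   // directive outside the teams construct
       ...;
     }
*/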
3142 /* Scan an OpenMP teams directive. */
3144 static void
3145 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
3147 omp_context *ctx = new_omp_context (stmt, outer_ctx);
3149 if (!gimple_omp_teams_host (stmt))
3151 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3152 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3153 return;
3155 taskreg_contexts.safe_push (ctx);
3156 gcc_assert (taskreg_nesting_level == 1);
3157 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3158 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3159 tree name = create_tmp_var_name (".omp_data_s");
3160 name = build_decl (gimple_location (stmt),
3161 TYPE_DECL, name, ctx->record_type);
3162 DECL_ARTIFICIAL (name) = 1;
3163 DECL_NAMELESS (name) = 1;
3164 TYPE_NAME (ctx->record_type) = name;
3165 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3166 create_omp_child_function (ctx, false);
3167 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
3169 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3170 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3172 if (TYPE_FIELDS (ctx->record_type) == NULL)
3173 ctx->record_type = ctx->receiver_decl = NULL;
3176 /* Check nesting restrictions. */
3177 static bool
3178 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
3180 tree c;
3182 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3183 inside an OpenACC CTX. */
3184 if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3185 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
3186 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3188 else if (!(is_gimple_omp (stmt)
3189 && is_gimple_omp_oacc (stmt)))
3191 if (oacc_get_fn_attrib (cfun->decl) != NULL)
3193 error_at (gimple_location (stmt),
3194 "non-OpenACC construct inside of OpenACC routine");
3195 return false;
3197 else
3198 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
3199 if (is_gimple_omp (octx->stmt)
3200 && is_gimple_omp_oacc (octx->stmt))
3202 error_at (gimple_location (stmt),
3203 "non-OpenACC construct inside of OpenACC region");
3204 return false;
3208 if (ctx != NULL)
3210 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
3211 && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
3213 c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
3214 OMP_CLAUSE_DEVICE);
3215 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3217 error_at (gimple_location (stmt),
3218 "OpenMP constructs are not allowed in target region "
3219 "with %<ancestor%>");
3220 return false;
3223 if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
3224 ctx->teams_nested_p = true;
3225 else
3226 ctx->nonteams_nested_p = true;
3228 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
3229 && ctx->outer
3230 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
3231 ctx = ctx->outer;
3232 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3233 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3234 && !ctx->loop_p)
3236 c = NULL_TREE;
3237 if (ctx->order_concurrent
3238 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
3239 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3240 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3242 error_at (gimple_location (stmt),
3243 "OpenMP constructs other than %<parallel%>, %<loop%>"
3244 " or %<simd%> may not be nested inside a region with"
3245 " the %<order(concurrent)%> clause");
3246 return false;
3248 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
3250 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3251 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3253 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
3254 && (ctx->outer == NULL
3255 || !gimple_omp_for_combined_into_p (ctx->stmt)
3256 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
3257 || (gimple_omp_for_kind (ctx->outer->stmt)
3258 != GF_OMP_FOR_KIND_FOR)
3259 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
3261 error_at (gimple_location (stmt),
3262 "%<ordered simd threads%> must be closely "
3263 "nested inside of %<%s simd%> region",
3264 lang_GNU_Fortran () ? "do" : "for");
3265 return false;
3267 return true;
3270 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3271 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
3272 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
3273 return true;
3274 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
3275 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3276 return true;
3277 error_at (gimple_location (stmt),
3278 "OpenMP constructs other than "
3279 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3280 "not be nested inside %<simd%> region");
3281 return false;
3283 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3285 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
3286 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
3287 && omp_find_clause (gimple_omp_for_clauses (stmt),
3288 OMP_CLAUSE_BIND) == NULL_TREE))
3289 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
3291 error_at (gimple_location (stmt),
3292 "only %<distribute%>, %<parallel%> or %<loop%> "
3293 "regions are allowed to be strictly nested inside "
3294 "%<teams%> region");
3295 return false;
3298 else if (ctx->order_concurrent
3299 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3300 && (gimple_code (stmt) != GIMPLE_OMP_FOR
3301 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3302 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3304 if (ctx->loop_p)
3305 error_at (gimple_location (stmt),
3306 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3307 "%<simd%> may not be nested inside a %<loop%> region");
3308 else
3309 error_at (gimple_location (stmt),
3310 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3311 "%<simd%> may not be nested inside a region with "
3312 "the %<order(concurrent)%> clause");
3313 return false;
3316 switch (gimple_code (stmt))
3318 case GIMPLE_OMP_FOR:
3319 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3320 return true;
3321 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3323 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3325 error_at (gimple_location (stmt),
3326 "%<distribute%> region must be strictly nested "
3327 "inside %<teams%> construct");
3328 return false;
3330 return true;
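/* E.g. (illustration): '#pragma omp distribute' appearing directly
   inside '#pragma omp parallel' is rejected; it must be strictly
   nested inside a '#pragma omp teams' construct.  */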
3332 /* We split a taskloop into a task with a nested taskloop inside it. */
3333 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3334 return true;
3335 /* For now, hope this will change and loop bind(parallel) will not
3336 be allowed in lots of contexts. */
3337 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3338 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3339 return true;
3340 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3342 bool ok = false;
3344 if (ctx)
3345 switch (gimple_code (ctx->stmt))
3347 case GIMPLE_OMP_FOR:
3348 ok = (gimple_omp_for_kind (ctx->stmt)
3349 == GF_OMP_FOR_KIND_OACC_LOOP);
3350 break;
3352 case GIMPLE_OMP_TARGET:
3353 switch (gimple_omp_target_kind (ctx->stmt))
3355 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3356 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3357 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3358 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3359 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3360 ok = true;
3361 break;
3363 default:
3364 break;
3367 default:
3368 break;
3370 else if (oacc_get_fn_attrib (current_function_decl))
3371 ok = true;
3372 if (!ok)
3374 error_at (gimple_location (stmt),
3375 "OpenACC loop directive must be associated with"
3376 " an OpenACC compute region");
3377 return false;
3380 /* FALLTHRU */
3381 case GIMPLE_CALL:
3382 if (is_gimple_call (stmt)
3383 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3384 == BUILT_IN_GOMP_CANCEL
3385 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3386 == BUILT_IN_GOMP_CANCELLATION_POINT))
3388 const char *bad = NULL;
3389 const char *kind = NULL;
3390 const char *construct
3391 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3392 == BUILT_IN_GOMP_CANCEL)
3393 ? "cancel"
3394 : "cancellation point";
3395 if (ctx == NULL)
3397 error_at (gimple_location (stmt), "orphaned %qs construct",
3398 construct);
3399 return false;
3401 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3402 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3403 : 0)
3405 case 1:
3406 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3407 bad = "parallel";
3408 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3409 == BUILT_IN_GOMP_CANCEL
3410 && !integer_zerop (gimple_call_arg (stmt, 1)))
3411 ctx->cancellable = true;
3412 kind = "parallel";
3413 break;
3414 case 2:
3415 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3416 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3417 bad = "for";
3418 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3419 == BUILT_IN_GOMP_CANCEL
3420 && !integer_zerop (gimple_call_arg (stmt, 1)))
3422 ctx->cancellable = true;
3423 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3424 OMP_CLAUSE_NOWAIT))
3425 warning_at (gimple_location (stmt), 0,
3426 "%<cancel for%> inside "
3427 "%<nowait%> for construct");
3428 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3429 OMP_CLAUSE_ORDERED))
3430 warning_at (gimple_location (stmt), 0,
3431 "%<cancel for%> inside "
3432 "%<ordered%> for construct");
3434 kind = "for";
3435 break;
3436 case 4:
3437 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3438 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3439 bad = "sections";
3440 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3441 == BUILT_IN_GOMP_CANCEL
3442 && !integer_zerop (gimple_call_arg (stmt, 1)))
3444 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3446 ctx->cancellable = true;
3447 if (omp_find_clause (gimple_omp_sections_clauses
3448 (ctx->stmt),
3449 OMP_CLAUSE_NOWAIT))
3450 warning_at (gimple_location (stmt), 0,
3451 "%<cancel sections%> inside "
3452 "%<nowait%> sections construct");
3454 else
3456 gcc_assert (ctx->outer
3457 && gimple_code (ctx->outer->stmt)
3458 == GIMPLE_OMP_SECTIONS);
3459 ctx->outer->cancellable = true;
3460 if (omp_find_clause (gimple_omp_sections_clauses
3461 (ctx->outer->stmt),
3462 OMP_CLAUSE_NOWAIT))
3463 warning_at (gimple_location (stmt), 0,
3464 "%<cancel sections%> inside "
3465 "%<nowait%> sections construct");
3468 kind = "sections";
3469 break;
3470 case 8:
3471 if (!is_task_ctx (ctx)
3472 && (!is_taskloop_ctx (ctx)
3473 || ctx->outer == NULL
3474 || !is_task_ctx (ctx->outer)))
3475 bad = "task";
3476 else
3478 for (omp_context *octx = ctx->outer;
3479 octx; octx = octx->outer)
3481 switch (gimple_code (octx->stmt))
3483 case GIMPLE_OMP_TASKGROUP:
3484 break;
3485 case GIMPLE_OMP_TARGET:
3486 if (gimple_omp_target_kind (octx->stmt)
3487 != GF_OMP_TARGET_KIND_REGION)
3488 continue;
3489 /* FALLTHRU */
3490 case GIMPLE_OMP_PARALLEL:
3491 case GIMPLE_OMP_TEAMS:
3492 error_at (gimple_location (stmt),
3493 "%<%s taskgroup%> construct not closely "
3494 "nested inside of %<taskgroup%> region",
3495 construct);
3496 return false;
3497 case GIMPLE_OMP_TASK:
3498 if (gimple_omp_task_taskloop_p (octx->stmt)
3499 && octx->outer
3500 && is_taskloop_ctx (octx->outer))
3502 tree clauses
3503 = gimple_omp_for_clauses (octx->outer->stmt);
3504 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3505 break;
3507 continue;
3508 default:
3509 continue;
3511 break;
3513 ctx->cancellable = true;
3515 kind = "taskgroup";
3516 break;
3517 default:
3518 error_at (gimple_location (stmt), "invalid arguments");
3519 return false;
3521 if (bad)
3523 error_at (gimple_location (stmt),
3524 "%<%s %s%> construct not closely nested inside of %qs",
3525 construct, kind, bad);
3526 return false;
3529 /* FALLTHRU */
3530 case GIMPLE_OMP_SECTIONS:
3531 case GIMPLE_OMP_SINGLE:
3532 for (; ctx != NULL; ctx = ctx->outer)
3533 switch (gimple_code (ctx->stmt))
3535 case GIMPLE_OMP_FOR:
3536 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3537 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3538 break;
3539 /* FALLTHRU */
3540 case GIMPLE_OMP_SECTIONS:
3541 case GIMPLE_OMP_SINGLE:
3542 case GIMPLE_OMP_ORDERED:
3543 case GIMPLE_OMP_MASTER:
3544 case GIMPLE_OMP_MASKED:
3545 case GIMPLE_OMP_TASK:
3546 case GIMPLE_OMP_CRITICAL:
3547 if (is_gimple_call (stmt))
3549 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3550 != BUILT_IN_GOMP_BARRIER)
3551 return true;
3552 error_at (gimple_location (stmt),
3553 "barrier region may not be closely nested inside "
3554 "of work-sharing, %<loop%>, %<critical%>, "
3555 "%<ordered%>, %<master%>, %<masked%>, explicit "
3556 "%<task%> or %<taskloop%> region");
3557 return false;
3559 error_at (gimple_location (stmt),
3560 "work-sharing region may not be closely nested inside "
3561 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3562 "%<master%>, %<masked%>, explicit %<task%> or "
3563 "%<taskloop%> region");
3564 return false;
3565 case GIMPLE_OMP_PARALLEL:
3566 case GIMPLE_OMP_TEAMS:
3567 return true;
3568 case GIMPLE_OMP_TARGET:
3569 if (gimple_omp_target_kind (ctx->stmt)
3570 == GF_OMP_TARGET_KIND_REGION)
3571 return true;
3572 break;
3573 default:
3574 break;
3576 break;
3577 case GIMPLE_OMP_MASTER:
3578 case GIMPLE_OMP_MASKED:
3579 for (; ctx != NULL; ctx = ctx->outer)
3580 switch (gimple_code (ctx->stmt))
3582 case GIMPLE_OMP_FOR:
3583 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3584 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3585 break;
3586 /* FALLTHRU */
3587 case GIMPLE_OMP_SECTIONS:
3588 case GIMPLE_OMP_SINGLE:
3589 case GIMPLE_OMP_TASK:
3590 error_at (gimple_location (stmt),
3591 "%qs region may not be closely nested inside "
3592 "of work-sharing, %<loop%>, explicit %<task%> or "
3593 "%<taskloop%> region",
3594 gimple_code (stmt) == GIMPLE_OMP_MASTER
3595 ? "master" : "masked");
3596 return false;
3597 case GIMPLE_OMP_PARALLEL:
3598 case GIMPLE_OMP_TEAMS:
3599 return true;
3600 case GIMPLE_OMP_TARGET:
3601 if (gimple_omp_target_kind (ctx->stmt)
3602 == GF_OMP_TARGET_KIND_REGION)
3603 return true;
3604 break;
3605 default:
3606 break;
3608 break;
3609 case GIMPLE_OMP_SCOPE:
3610 for (; ctx != NULL; ctx = ctx->outer)
3611 switch (gimple_code (ctx->stmt))
3613 case GIMPLE_OMP_FOR:
3614 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3615 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3616 break;
3617 /* FALLTHRU */
3618 case GIMPLE_OMP_SECTIONS:
3619 case GIMPLE_OMP_SINGLE:
3620 case GIMPLE_OMP_TASK:
3621 case GIMPLE_OMP_CRITICAL:
3622 case GIMPLE_OMP_ORDERED:
3623 case GIMPLE_OMP_MASTER:
3624 case GIMPLE_OMP_MASKED:
3625 error_at (gimple_location (stmt),
3626 "%<scope%> region may not be closely nested inside "
3627 "of work-sharing, %<loop%>, explicit %<task%>, "
3628 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3629 "or %<masked%> region");
3630 return false;
3631 case GIMPLE_OMP_PARALLEL:
3632 case GIMPLE_OMP_TEAMS:
3633 return true;
3634 case GIMPLE_OMP_TARGET:
3635 if (gimple_omp_target_kind (ctx->stmt)
3636 == GF_OMP_TARGET_KIND_REGION)
3637 return true;
3638 break;
3639 default:
3640 break;
3642 break;
3643 case GIMPLE_OMP_TASK:
3644 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3645 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
3647 enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
3648 error_at (OMP_CLAUSE_LOCATION (c),
3649 "%<%s(%s)%> is only allowed in %<omp ordered%>",
3650 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross",
3651 kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
3652 return false;
3654 break;
3655 case GIMPLE_OMP_ORDERED:
3656 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3657 c; c = OMP_CLAUSE_CHAIN (c))
3659 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS)
3661 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
3663 error_at (OMP_CLAUSE_LOCATION (c),
3664 "invalid depend kind in omp %<ordered%> %<depend%>");
3665 return false;
3667 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3668 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3669 continue;
3672 tree oclause;
3673 /* Look for a containing ordered(N) loop. */
3674 if (ctx == NULL
3675 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3676 || (oclause
3677 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3678 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3680 error_at (OMP_CLAUSE_LOCATION (c),
3681 "%<ordered%> construct with %<depend%> clause "
3682 "must be closely nested inside an %<ordered%> loop");
3683 return false;
3686 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3687 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3689 /* ordered simd must be closely nested inside a simd region,
3690 and a simd region must not encounter constructs other than
3691 ordered simd; therefore ordered simd is either orphaned,
3692 or ctx->stmt must be simd. The latter case has already
3693 been handled earlier. */
3694 if (ctx != NULL)
3696 error_at (gimple_location (stmt),
3697 "%<ordered%> %<simd%> must be closely nested inside "
3698 "%<simd%> region");
3699 return false;
3702 for (; ctx != NULL; ctx = ctx->outer)
3703 switch (gimple_code (ctx->stmt))
3705 case GIMPLE_OMP_CRITICAL:
3706 case GIMPLE_OMP_TASK:
3707 case GIMPLE_OMP_ORDERED:
3708 ordered_in_taskloop:
3709 error_at (gimple_location (stmt),
3710 "%<ordered%> region may not be closely nested inside "
3711 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3712 "%<taskloop%> region");
3713 return false;
3714 case GIMPLE_OMP_FOR:
3715 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3716 goto ordered_in_taskloop;
3717 tree o;
3718 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3719 OMP_CLAUSE_ORDERED);
3720 if (o == NULL)
3722 error_at (gimple_location (stmt),
3723 "%<ordered%> region must be closely nested inside "
3724 "a loop region with an %<ordered%> clause");
3725 return false;
3727 if (!gimple_omp_ordered_standalone_p (stmt))
3729 if (OMP_CLAUSE_ORDERED_DOACROSS (o))
3731 error_at (gimple_location (stmt),
3732 "%<ordered%> construct without %<doacross%> or "
3733 "%<depend%> clauses must not have the same "
3734 "binding region as %<ordered%> construct with "
3735 "those clauses");
3736 return false;
3738 else if (OMP_CLAUSE_ORDERED_EXPR (o))
3740 tree co
3741 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3742 OMP_CLAUSE_COLLAPSE);
3743 HOST_WIDE_INT
3744 o_n = tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (o));
3745 HOST_WIDE_INT c_n = 1;
3746 if (co)
3747 c_n = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (co));
3748 if (o_n != c_n)
3750 error_at (gimple_location (stmt),
3751 "%<ordered%> construct without %<doacross%> "
3752 "or %<depend%> clauses binds to loop where "
3753 "%<collapse%> argument %wd is different from "
3754 "%<ordered%> argument %wd", c_n, o_n);
3755 return false;
3759 return true;
3760 case GIMPLE_OMP_TARGET:
3761 if (gimple_omp_target_kind (ctx->stmt)
3762 != GF_OMP_TARGET_KIND_REGION)
3763 break;
3764 /* FALLTHRU */
3765 case GIMPLE_OMP_PARALLEL:
3766 case GIMPLE_OMP_TEAMS:
3767 error_at (gimple_location (stmt),
3768 "%<ordered%> region must be closely nested inside "
3769 "a loop region with an %<ordered%> clause");
3770 return false;
3771 default:
3772 break;
3774 break;
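   /* Illustrative sketches of diagnostics issued above:

	#pragma omp parallel
	{
	  #pragma omp ordered   <-- error: not closely nested inside a
	  { }                       loop region with an ordered clause
	}

	#pragma omp for ordered(2)
	for (int i = 0; i < n; i++)
	  for (int j = 0; j < n; j++)
	    {
	      #pragma omp ordered   <-- error: ordered argument 2 differs
	      { }                       from collapse argument 1
	    }
   */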
3775 case GIMPLE_OMP_CRITICAL:
3777 tree this_stmt_name
3778 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3779 for (; ctx != NULL; ctx = ctx->outer)
3780 if (gomp_critical *other_crit
3781 = dyn_cast <gomp_critical *> (ctx->stmt))
3782 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3784 error_at (gimple_location (stmt),
3785 "%<critical%> region may not be nested inside "
3786 "a %<critical%> region with the same name");
3787 return false;
3790 break;
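   /* Sketch of the (self-deadlocking) nesting rejected above:

	#pragma omp critical (lock1)
	{
	  #pragma omp critical (lock1)   <-- error: same name
	  { }
	}

      critical regions with different names may nest.  */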
3791 case GIMPLE_OMP_TEAMS:
3792 if (ctx == NULL)
3793 break;
3794 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3795 || (gimple_omp_target_kind (ctx->stmt)
3796 != GF_OMP_TARGET_KIND_REGION))
3798 /* Teams construct can appear either strictly nested inside of
3799 target construct with no intervening stmts, or can be encountered
3800 only by initial task (so must not appear inside any OpenMP
3801 construct). */
3802 error_at (gimple_location (stmt),
3803 "%<teams%> construct must be closely nested inside of "
3804 "%<target%> construct or not nested in any OpenMP "
3805 "construct");
3806 return false;
3808 break;
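   /* Illustrative sketch: teams is accepted only as

	#pragma omp target
	#pragma omp teams   <-- OK: strictly nested in target
	{ }

      or outside of any OpenMP construct; e.g. teams inside a parallel
      region is rejected here.  */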
3809 case GIMPLE_OMP_TARGET:
3810 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3811 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
3813 enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
3814 error_at (OMP_CLAUSE_LOCATION (c),
3815 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3816 kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
3817 return false;
3819 if (is_gimple_omp_offloaded (stmt)
3820 && oacc_get_fn_attrib (cfun->decl) != NULL)
3822 error_at (gimple_location (stmt),
3823 "OpenACC region inside of OpenACC routine, nested "
3824 "parallelism not supported yet");
3825 return false;
3827 for (; ctx != NULL; ctx = ctx->outer)
3829 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3831 if (is_gimple_omp (stmt)
3832 && is_gimple_omp_oacc (stmt)
3833 && is_gimple_omp (ctx->stmt))
3835 error_at (gimple_location (stmt),
3836 "OpenACC construct inside of non-OpenACC region");
3837 return false;
3839 continue;
3842 const char *stmt_name, *ctx_stmt_name;
3843 switch (gimple_omp_target_kind (stmt))
3845 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3846 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3847 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3848 case GF_OMP_TARGET_KIND_ENTER_DATA:
3849 stmt_name = "target enter data"; break;
3850 case GF_OMP_TARGET_KIND_EXIT_DATA:
3851 stmt_name = "target exit data"; break;
3852 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3853 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3854 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3855 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3856 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3857 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
3858 stmt_name = "enter data"; break;
3859 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
3860 stmt_name = "exit data"; break;
3861 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3862 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3863 break;
3864 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3865 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3866 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3867 /* OpenACC 'kernels' decomposed parts. */
3868 stmt_name = "kernels"; break;
3869 default: gcc_unreachable ();
3871 switch (gimple_omp_target_kind (ctx->stmt))
3873 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3874 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3875 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3876 ctx_stmt_name = "parallel"; break;
3877 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3878 ctx_stmt_name = "kernels"; break;
3879 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3880 ctx_stmt_name = "serial"; break;
3881 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3882 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3883 ctx_stmt_name = "host_data"; break;
3884 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3885 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3886 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3887 /* OpenACC 'kernels' decomposed parts. */
3888 ctx_stmt_name = "kernels"; break;
3889 default: gcc_unreachable ();
3892 /* OpenACC/OpenMP mismatch? */
3893 if (is_gimple_omp_oacc (stmt)
3894 != is_gimple_omp_oacc (ctx->stmt))
3896 error_at (gimple_location (stmt),
3897 "%s %qs construct inside of %s %qs region",
3898 (is_gimple_omp_oacc (stmt)
3899 ? "OpenACC" : "OpenMP"), stmt_name,
3900 (is_gimple_omp_oacc (ctx->stmt)
3901 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3902 return false;
3904 if (is_gimple_omp_offloaded (ctx->stmt))
3906 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3907 if (is_gimple_omp_oacc (ctx->stmt))
3909 error_at (gimple_location (stmt),
3910 "%qs construct inside of %qs region",
3911 stmt_name, ctx_stmt_name);
3912 return false;
3914 else
3916 if ((gimple_omp_target_kind (ctx->stmt)
3917 == GF_OMP_TARGET_KIND_REGION)
3918 && (gimple_omp_target_kind (stmt)
3919 == GF_OMP_TARGET_KIND_REGION))
3921 c = omp_find_clause (gimple_omp_target_clauses (stmt),
3922 OMP_CLAUSE_DEVICE);
3923 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3924 break;
3926 warning_at (gimple_location (stmt), 0,
3927 "%qs construct inside of %qs region",
3928 stmt_name, ctx_stmt_name);
3932 break;
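   /* Sketch: an OpenMP target region nested in another target region
      only gets a warning, and is silently accepted for reverse offload:

	#pragma omp target
	{
	  #pragma omp target device(ancestor: 1)   <-- OK, no warning
	  ;
	}

      while OpenACC/OpenMP mixtures and a target inside an offloaded
      OpenACC region are hard errors above.  */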
3933 default:
3934 break;
3936 return true;
3940 /* Helper function for scan_omp.
3942 Callback for walk_tree or operators in walk_gimple_stmt used to
3943 scan for OMP directives in TP. */
3945 static tree
3946 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3948 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3949 omp_context *ctx = (omp_context *) wi->info;
3950 tree t = *tp;
3952 switch (TREE_CODE (t))
3954 case VAR_DECL:
3955 case PARM_DECL:
3956 case LABEL_DECL:
3957 case RESULT_DECL:
3958 if (ctx)
3960 tree repl = remap_decl (t, &ctx->cb);
3961 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3962 *tp = repl;
3964 break;
3966 default:
3967 if (ctx && TYPE_P (t))
3968 *tp = remap_type (t, &ctx->cb);
3969 else if (!DECL_P (t))
3971 *walk_subtrees = 1;
3972 if (ctx)
3974 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3975 if (tem != TREE_TYPE (t))
3977 if (TREE_CODE (t) == INTEGER_CST)
3978 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3979 else
3980 TREE_TYPE (t) = tem;
3984 break;
3987 return NULL_TREE;
3990 /* Return true if FNDECL is a setjmp or a longjmp. */
3992 static bool
3993 setjmp_or_longjmp_p (const_tree fndecl)
3995 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3996 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3997 return true;
3999 tree declname = DECL_NAME (fndecl);
4000 if (!declname
4001 || (DECL_CONTEXT (fndecl) != NULL_TREE
4002 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
4003 || !TREE_PUBLIC (fndecl))
4004 return false;
4006 const char *name = IDENTIFIER_POINTER (declname);
4007 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
4010 /* Return true if FNDECL is an omp_* runtime API call. */
4012 static bool
4013 omp_runtime_api_call (const_tree fndecl)
4015 tree declname = DECL_NAME (fndecl);
4016 if (!declname
4017 || (DECL_CONTEXT (fndecl) != NULL_TREE
4018 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
4019 || !TREE_PUBLIC (fndecl))
4020 return false;
4022 const char *name = IDENTIFIER_POINTER (declname);
4023 if (!startswith (name, "omp_"))
4024 return false;
4026 static const char *omp_runtime_apis[] =
4028 /* This array has 3 sections. First omp_* calls that don't
4029 have any suffixes. */
4030 "aligned_alloc",
4031 "aligned_calloc",
4032 "alloc",
4033 "calloc",
4034 "free",
4035 "get_mapped_ptr",
4036 "realloc",
4037 "target_alloc",
4038 "target_associate_ptr",
4039 "target_disassociate_ptr",
4040 "target_free",
4041 "target_is_accessible",
4042 "target_is_present",
4043 "target_memcpy",
4044 "target_memcpy_async",
4045 "target_memcpy_rect",
4046 "target_memcpy_rect_async",
4047 NULL,
4048 /* Now omp_* calls that are available as omp_* and omp_*_; however, the
4049 DECL_NAME is always omp_* without trailing underscore. */
4050 "capture_affinity",
4051 "destroy_allocator",
4052 "destroy_lock",
4053 "destroy_nest_lock",
4054 "display_affinity",
4055 "fulfill_event",
4056 "get_active_level",
4057 "get_affinity_format",
4058 "get_cancellation",
4059 "get_default_allocator",
4060 "get_default_device",
4061 "get_device_num",
4062 "get_dynamic",
4063 "get_initial_device",
4064 "get_level",
4065 "get_max_active_levels",
4066 "get_max_task_priority",
4067 "get_max_teams",
4068 "get_max_threads",
4069 "get_nested",
4070 "get_num_devices",
4071 "get_num_places",
4072 "get_num_procs",
4073 "get_num_teams",
4074 "get_num_threads",
4075 "get_partition_num_places",
4076 "get_place_num",
4077 "get_proc_bind",
4078 "get_supported_active_levels",
4079 "get_team_num",
4080 "get_teams_thread_limit",
4081 "get_thread_limit",
4082 "get_thread_num",
4083 "get_wtick",
4084 "get_wtime",
4085 "in_final",
4086 "in_parallel",
4087 "init_lock",
4088 "init_nest_lock",
4089 "is_initial_device",
4090 "pause_resource",
4091 "pause_resource_all",
4092 "set_affinity_format",
4093 "set_default_allocator",
4094 "set_lock",
4095 "set_nest_lock",
4096 "test_lock",
4097 "test_nest_lock",
4098 "unset_lock",
4099 "unset_nest_lock",
4100 NULL,
4101 /* And finally calls available as omp_*, omp_*_ and omp_*_8_; however,
4102 only omp_* and omp_*_8 appear as DECL_NAME. */
4103 "display_env",
4104 "get_ancestor_thread_num",
4105 "init_allocator",
4106 "get_partition_place_nums",
4107 "get_place_num_procs",
4108 "get_place_proc_ids",
4109 "get_schedule",
4110 "get_team_size",
4111 "set_default_device",
4112 "set_dynamic",
4113 "set_max_active_levels",
4114 "set_nested",
4115 "set_num_teams",
4116 "set_num_threads",
4117 "set_schedule",
4118 "set_teams_thread_limit"
4121 int mode = 0;
4122 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
4124 if (omp_runtime_apis[i] == NULL)
4126 mode++;
4127 continue;
4129 size_t len = strlen (omp_runtime_apis[i]);
4130 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
4131 && (name[4 + len] == '\0'
4132 || (mode > 1 && strcmp (name + 4 + len, "_8") == 0)))
4133 return true;
4135 return false;
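   /* For illustration: with the table above, "omp_get_wtime" matches in
      the second section (so "omp_get_wtime_8" does not),
      "omp_get_team_size" and "omp_get_team_size_8" both match in the
      third section, and the first section never accepts the "_8"
      suffix, so "omp_target_alloc_8" is not treated as an OpenMP API
      call.  */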
4138 /* Helper function for scan_omp.
4140 Callback for walk_gimple_stmt used to scan for OMP directives in
4141 the current statement in GSI. */
4143 static tree
4144 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
4145 struct walk_stmt_info *wi)
4147 gimple *stmt = gsi_stmt (*gsi);
4148 omp_context *ctx = (omp_context *) wi->info;
4150 if (gimple_has_location (stmt))
4151 input_location = gimple_location (stmt);
4153 /* Check the nesting restrictions. */
4154 bool remove = false;
4155 if (is_gimple_omp (stmt))
4156 remove = !check_omp_nesting_restrictions (stmt, ctx);
4157 else if (is_gimple_call (stmt))
4159 tree fndecl = gimple_call_fndecl (stmt);
4160 if (fndecl)
4162 if (ctx
4163 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4164 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
4165 && setjmp_or_longjmp_p (fndecl)
4166 && !ctx->loop_p)
4168 remove = true;
4169 error_at (gimple_location (stmt),
4170 "setjmp/longjmp inside %<simd%> construct");
4172 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4173 switch (DECL_FUNCTION_CODE (fndecl))
4175 case BUILT_IN_GOMP_BARRIER:
4176 case BUILT_IN_GOMP_CANCEL:
4177 case BUILT_IN_GOMP_CANCELLATION_POINT:
4178 case BUILT_IN_GOMP_TASKYIELD:
4179 case BUILT_IN_GOMP_TASKWAIT:
4180 case BUILT_IN_GOMP_TASKGROUP_START:
4181 case BUILT_IN_GOMP_TASKGROUP_END:
4182 remove = !check_omp_nesting_restrictions (stmt, ctx);
4183 break;
4184 default:
4185 break;
4187 else if (ctx)
4189 omp_context *octx = ctx;
4190 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
4191 octx = ctx->outer;
4192 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
4194 remove = true;
4195 error_at (gimple_location (stmt),
4196 "OpenMP runtime API call %qD in a region with "
4197 "%<order(concurrent)%> clause", fndecl);
4199 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4200 && omp_runtime_api_call (fndecl)
4201 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4202 != strlen ("omp_get_num_teams"))
4203 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4204 "omp_get_num_teams") != 0)
4205 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4206 != strlen ("omp_get_team_num"))
4207 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4208 "omp_get_team_num") != 0))
4210 remove = true;
4211 error_at (gimple_location (stmt),
4212 "OpenMP runtime API call %qD strictly nested in a "
4213 "%<teams%> region", fndecl);
4215 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
4216 && (gimple_omp_target_kind (ctx->stmt)
4217 == GF_OMP_TARGET_KIND_REGION)
4218 && omp_runtime_api_call (fndecl))
4220 tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
4221 tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
4222 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
4223 error_at (gimple_location (stmt),
4224 "OpenMP runtime API call %qD in a region with "
4225 "%<device(ancestor)%> clause", fndecl);
4230 if (remove)
4232 stmt = gimple_build_nop ();
4233 gsi_replace (gsi, stmt, false);
4236 *handled_ops_p = true;
4238 switch (gimple_code (stmt))
4240 case GIMPLE_OMP_PARALLEL:
4241 taskreg_nesting_level++;
4242 scan_omp_parallel (gsi, ctx);
4243 taskreg_nesting_level--;
4244 break;
4246 case GIMPLE_OMP_TASK:
4247 taskreg_nesting_level++;
4248 scan_omp_task (gsi, ctx);
4249 taskreg_nesting_level--;
4250 break;
4252 case GIMPLE_OMP_FOR:
4253 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4254 == GF_OMP_FOR_KIND_SIMD)
4255 && gimple_omp_for_combined_into_p (stmt)
4256 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
4258 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
4259 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
4260 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
4262 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
4263 break;
4266 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4267 == GF_OMP_FOR_KIND_SIMD)
4268 && omp_maybe_offloaded_ctx (ctx)
4269 && omp_max_simt_vf ()
4270 && gimple_omp_for_collapse (stmt) == 1)
4271 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
4272 else
4273 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
4274 break;
4276 case GIMPLE_OMP_SCOPE:
4277 ctx = new_omp_context (stmt, ctx);
4278 scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
4279 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4280 break;
4282 case GIMPLE_OMP_SECTIONS:
4283 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
4284 break;
4286 case GIMPLE_OMP_SINGLE:
4287 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
4288 break;
4290 case GIMPLE_OMP_SCAN:
4291 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
4293 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
4294 ctx->scan_inclusive = true;
4295 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
4296 ctx->scan_exclusive = true;
4298 /* FALLTHRU */
4299 case GIMPLE_OMP_SECTION:
4300 case GIMPLE_OMP_MASTER:
4301 case GIMPLE_OMP_ORDERED:
4302 case GIMPLE_OMP_CRITICAL:
4303 ctx = new_omp_context (stmt, ctx);
4304 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4305 break;
4307 case GIMPLE_OMP_MASKED:
4308 ctx = new_omp_context (stmt, ctx);
4309 scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
4310 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4311 break;
4313 case GIMPLE_OMP_TASKGROUP:
4314 ctx = new_omp_context (stmt, ctx);
4315 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
4316 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4317 break;
4319 case GIMPLE_OMP_TARGET:
4320 if (is_gimple_omp_offloaded (stmt))
4322 taskreg_nesting_level++;
4323 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4324 taskreg_nesting_level--;
4326 else
4327 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4328 break;
4330 case GIMPLE_OMP_TEAMS:
4331 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4333 taskreg_nesting_level++;
4334 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4335 taskreg_nesting_level--;
4337 else
4338 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4339 break;
4341 case GIMPLE_BIND:
4343 tree var;
4345 *handled_ops_p = false;
4346 if (ctx)
4347 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4348 var ;
4349 var = DECL_CHAIN (var))
4350 insert_decl_map (&ctx->cb, var, var);
4352 break;
4353 default:
4354 *handled_ops_p = false;
4355 break;
4358 return NULL_TREE;
4362 /* Scan all the statements starting at the current statement. CTX
4363 contains context information about the OMP directives and
4364 clauses found during the scan. */
4366 static void
4367 scan_omp (gimple_seq *body_p, omp_context *ctx)
4369 location_t saved_location;
4370 struct walk_stmt_info wi;
4372 memset (&wi, 0, sizeof (wi));
4373 wi.info = ctx;
4374 wi.want_locations = true;
4376 saved_location = input_location;
4377 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4378 input_location = saved_location;
4381 /* Re-gimplification and code generation routines. */
4383 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4384 of BIND if in a method. */
4386 static void
4387 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4389 if (DECL_ARGUMENTS (current_function_decl)
4390 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4391 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4392 == POINTER_TYPE))
4394 tree vars = gimple_bind_vars (bind);
4395 for (tree *pvar = &vars; *pvar; )
4396 if (omp_member_access_dummy_var (*pvar))
4397 *pvar = DECL_CHAIN (*pvar);
4398 else
4399 pvar = &DECL_CHAIN (*pvar);
4400 gimple_bind_set_vars (bind, vars);
4404 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4405 block and its subblocks. */
4407 static void
4408 remove_member_access_dummy_vars (tree block)
4410 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4411 if (omp_member_access_dummy_var (*pvar))
4412 *pvar = DECL_CHAIN (*pvar);
4413 else
4414 pvar = &DECL_CHAIN (*pvar);
4416 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4417 remove_member_access_dummy_vars (block);
4420 /* If a context was created for STMT when it was scanned, return it. */
4422 static omp_context *
4423 maybe_lookup_ctx (gimple *stmt)
4425 splay_tree_node n;
4426 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4427 return n ? (omp_context *) n->value : NULL;
4431 /* Find the mapping for DECL in CTX or the immediately enclosing
4432 context that has a mapping for DECL.
4434 If CTX is a nested parallel directive, we may have to use the decl
4435 mappings created in CTX's parent context. Suppose that we have the
4436 following parallel nesting (variable UIDs shown for clarity):
4438 iD.1562 = 0;
4439 #omp parallel shared(iD.1562) -> outer parallel
4440 iD.1562 = iD.1562 + 1;
4442 #omp parallel shared (iD.1562) -> inner parallel
4443 iD.1562 = iD.1562 - 1;
4445 Each parallel structure will create a distinct .omp_data_s structure
4446 for copying iD.1562 in/out of the directive:
4448 outer parallel .omp_data_s.1.i -> iD.1562
4449 inner parallel .omp_data_s.2.i -> iD.1562
4451 A shared variable mapping will produce a copy-out operation before
4452 the parallel directive and a copy-in operation after it. So, in
4453 this case we would have:
4455 iD.1562 = 0;
4456 .omp_data_o.1.i = iD.1562;
4457 #omp parallel shared(iD.1562) -> outer parallel
4458 .omp_data_i.1 = &.omp_data_o.1
4459 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4461 .omp_data_o.2.i = iD.1562; -> **
4462 #omp parallel shared(iD.1562) -> inner parallel
4463 .omp_data_i.2 = &.omp_data_o.2
4464 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4467 ** This is a problem. The symbol iD.1562 cannot be referenced
4468 inside the body of the outer parallel region. But since we are
4469 emitting this copy operation while expanding the inner parallel
4470 directive, we need to access the CTX structure of the outer
4471 parallel directive to get the correct mapping:
4473 .omp_data_o.2.i = .omp_data_i.1->i
4475 Since there may be other workshare or parallel directives enclosing
4476 the parallel directive, it may be necessary to walk up the context
4477 parent chain. This is not a problem in general because nested
4478 parallelism happens only rarely. */
4480 static tree
4481 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4483 tree t;
4484 omp_context *up;
4486 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4487 t = maybe_lookup_decl (decl, up);
4489 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4491 return t ? t : decl;
4495 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4496 in outer contexts. */
4498 static tree
4499 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4501 tree t = NULL;
4502 omp_context *up;
4504 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4505 t = maybe_lookup_decl (decl, up);
4507 return t ? t : decl;
4511 /* Construct the initialization value for reduction operation OP. */
4513 tree
4514 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4516 switch (op)
4518 case PLUS_EXPR:
4519 case MINUS_EXPR:
4520 case BIT_IOR_EXPR:
4521 case BIT_XOR_EXPR:
4522 case TRUTH_OR_EXPR:
4523 case TRUTH_ORIF_EXPR:
4524 case TRUTH_XOR_EXPR:
4525 case NE_EXPR:
4526 return build_zero_cst (type);
4528 case MULT_EXPR:
4529 case TRUTH_AND_EXPR:
4530 case TRUTH_ANDIF_EXPR:
4531 case EQ_EXPR:
4532 return fold_convert_loc (loc, type, integer_one_node);
4534 case BIT_AND_EXPR:
4535 return fold_convert_loc (loc, type, integer_minus_one_node);
4537 case MAX_EXPR:
4538 if (SCALAR_FLOAT_TYPE_P (type))
4540 REAL_VALUE_TYPE min;
4541 if (HONOR_INFINITIES (type))
4542 real_arithmetic (&min, NEGATE_EXPR, &dconstinf, NULL);
4543 else
4544 real_maxval (&min, 1, TYPE_MODE (type));
4545 return build_real (type, min);
4547 else if (POINTER_TYPE_P (type))
4549 wide_int min
4550 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4551 return wide_int_to_tree (type, min);
4553 else
4555 gcc_assert (INTEGRAL_TYPE_P (type));
4556 return TYPE_MIN_VALUE (type);
4559 case MIN_EXPR:
4560 if (SCALAR_FLOAT_TYPE_P (type))
4562 REAL_VALUE_TYPE max;
4563 if (HONOR_INFINITIES (type))
4564 max = dconstinf;
4565 else
4566 real_maxval (&max, 0, TYPE_MODE (type));
4567 return build_real (type, max);
4569 else if (POINTER_TYPE_P (type))
4571 wide_int max
4572 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4573 return wide_int_to_tree (type, max);
4575 else
4577 gcc_assert (INTEGRAL_TYPE_P (type));
4578 return TYPE_MAX_VALUE (type);
4581 default:
4582 gcc_unreachable ();
4586 /* Construct the initialization value for reduction CLAUSE. */
4588 tree
4589 omp_reduction_init (tree clause, tree type)
4591 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4592 OMP_CLAUSE_REDUCTION_CODE (clause), type);
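   /* For example: with reduction(+:x) the private copy is initialized
      to 0, with reduction(*:x) to 1, with reduction(&:x) to ~0, and
      with reduction(max:x) to the minimum representable value (-inf for
      floats when infinities are honored, TYPE_MIN_VALUE for
      integers).  */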
4595 /* Return alignment to be assumed for var in CLAUSE, which should be
4596 OMP_CLAUSE_ALIGNED. */
4598 static tree
4599 omp_clause_aligned_alignment (tree clause)
4601 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4602 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4604 /* Otherwise return the implementation-defined alignment. */
4605 unsigned int al = 1;
4606 opt_scalar_mode mode_iter;
4607 auto_vector_modes modes;
4608 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4609 static enum mode_class classes[]
4610 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4611 for (int i = 0; i < 4; i += 2)
4612 /* The for loop above dictates that we only walk through scalar classes. */
4613 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4615 scalar_mode mode = mode_iter.require ();
4616 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4617 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4618 continue;
4619 machine_mode alt_vmode;
4620 for (unsigned int j = 0; j < modes.length (); ++j)
4621 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4622 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4623 vmode = alt_vmode;
4625 tree type = lang_hooks.types.type_for_mode (mode, 1);
4626 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4627 continue;
4628 type = build_vector_type_for_mode (type, vmode);
4629 if (TYPE_MODE (type) != vmode)
4630 continue;
4631 if (TYPE_ALIGN_UNIT (type) > al)
4632 al = TYPE_ALIGN_UNIT (type);
4634 return build_int_cst (integer_type_node, al);
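   /* E.g. (illustrative, target-dependent): for aligned(p) without an
      explicit alignment this typically yields the unit alignment of the
      widest preferred vector mode, such as 16 with only SSE2, 32 with
      AVX, or 64 with AVX-512 on x86_64.  */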
4638 /* This structure is part of the interface between lower_rec_simd_input_clauses
4639 and lower_rec_input_clauses. */
4641 class omplow_simd_context {
4642 public:
4643 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4644 tree idx;
4645 tree lane;
4646 tree lastlane;
4647 vec<tree, va_heap> simt_eargs;
4648 gimple_seq simt_dlist;
4649 poly_uint64_pod max_vf;
4650 bool is_simt;
4653 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4654 privatization. */
4656 static bool
4657 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4658 omplow_simd_context *sctx, tree &ivar,
4659 tree &lvar, tree *rvar = NULL,
4660 tree *rvar2 = NULL)
4662 if (known_eq (sctx->max_vf, 0U))
4664 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4665 if (maybe_gt (sctx->max_vf, 1U))
4667 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4668 OMP_CLAUSE_SAFELEN);
4669 if (c)
4671 poly_uint64 safe_len;
4672 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4673 || maybe_lt (safe_len, 1U))
4674 sctx->max_vf = 1;
4675 else
4676 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4679 if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
4681 for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
4682 c = OMP_CLAUSE_CHAIN (c))
4684 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4685 continue;
4687 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4689 /* UDR reductions are not supported yet for SIMT; disable
4690 SIMT. */
4691 sctx->max_vf = 1;
4692 break;
4695 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
4696 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
4698 /* Doing boolean operations on non-integral types is
4699 for conformance only; it's not worth supporting this
4700 for SIMT. */
4701 sctx->max_vf = 1;
4702 break;
4706 if (maybe_gt (sctx->max_vf, 1U))
4708 sctx->idx = create_tmp_var (unsigned_type_node);
4709 sctx->lane = create_tmp_var (unsigned_type_node);
4712 if (known_eq (sctx->max_vf, 1U))
4713 return false;
4715 if (sctx->is_simt)
4717 if (is_gimple_reg (new_var))
4719 ivar = lvar = new_var;
4720 return true;
4722 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4723 ivar = lvar = create_tmp_var (type);
4724 TREE_ADDRESSABLE (ivar) = 1;
4725 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4726 NULL, DECL_ATTRIBUTES (ivar));
4727 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4728 tree clobber = build_clobber (type);
4729 gimple *g = gimple_build_assign (ivar, clobber);
4730 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4732 else
4734 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4735 tree avar = create_tmp_var_raw (atype);
4736 if (TREE_ADDRESSABLE (new_var))
4737 TREE_ADDRESSABLE (avar) = 1;
4738 DECL_ATTRIBUTES (avar)
4739 = tree_cons (get_identifier ("omp simd array"), NULL,
4740 DECL_ATTRIBUTES (avar));
4741 gimple_add_tmp_var (avar);
4742 tree iavar = avar;
4743 if (rvar && !ctx->for_simd_scan_phase)
4745 /* For inscan reductions, create another array temporary,
4746 which will hold the reduced value. */
4747 iavar = create_tmp_var_raw (atype);
4748 if (TREE_ADDRESSABLE (new_var))
4749 TREE_ADDRESSABLE (iavar) = 1;
4750 DECL_ATTRIBUTES (iavar)
4751 = tree_cons (get_identifier ("omp simd array"), NULL,
4752 tree_cons (get_identifier ("omp simd inscan"), NULL,
4753 DECL_ATTRIBUTES (iavar)));
4754 gimple_add_tmp_var (iavar);
4755 ctx->cb.decl_map->put (avar, iavar);
4756 if (sctx->lastlane == NULL_TREE)
4757 sctx->lastlane = create_tmp_var (unsigned_type_node);
4758 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4759 sctx->lastlane, NULL_TREE, NULL_TREE);
4760 TREE_THIS_NOTRAP (*rvar) = 1;
4762 if (ctx->scan_exclusive)
4764 /* And for exclusive scan yet another one, which will
4765 hold the value during the scan phase. */
4766 tree savar = create_tmp_var_raw (atype);
4767 if (TREE_ADDRESSABLE (new_var))
4768 TREE_ADDRESSABLE (savar) = 1;
4769 DECL_ATTRIBUTES (savar)
4770 = tree_cons (get_identifier ("omp simd array"), NULL,
4771 tree_cons (get_identifier ("omp simd inscan "
4772 "exclusive"), NULL,
4773 DECL_ATTRIBUTES (savar)));
4774 gimple_add_tmp_var (savar);
4775 ctx->cb.decl_map->put (iavar, savar);
4776 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4777 sctx->idx, NULL_TREE, NULL_TREE);
4778 TREE_THIS_NOTRAP (*rvar2) = 1;
4781 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4782 NULL_TREE, NULL_TREE);
4783 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4784 NULL_TREE, NULL_TREE);
4785 TREE_THIS_NOTRAP (ivar) = 1;
4786 TREE_THIS_NOTRAP (lvar) = 1;
4788 if (DECL_P (new_var))
4790 SET_DECL_VALUE_EXPR (new_var, lvar);
4791 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4793 return true;
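   /* Illustrative sketch of the non-SIMT case, assuming max_vf == 8:
      for simd private(x) of type T, the privatized x is backed by

	T D.avar[8];   attribute "omp simd array"

      and uses of x in the loop body go through DECL_VALUE_EXPR to
      D.avar[lane] (LVAR), while D.avar[idx] (IVAR) is used when
      initializing and combining the per-lane copies.  */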
4796 /* Helper function of lower_rec_input_clauses. For a reference
4797 in simd reduction, add an underlying variable it will reference. */
4799 static void
4800 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4802 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4803 if (TREE_CONSTANT (z))
4805 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4806 get_name (new_vard));
4807 gimple_add_tmp_var (z);
4808 TREE_ADDRESSABLE (z) = 1;
4809 z = build_fold_addr_expr_loc (loc, z);
4810 gimplify_assign (new_vard, z, ilist);
4814 /* Helper function for lower_rec_input_clauses. Emit into the ILIST
4815 sequence code that computes (type) (tskred_temp[idx]). */
4817 static tree
4818 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4819 unsigned idx)
4821 unsigned HOST_WIDE_INT sz
4822 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4823 tree r = build2 (MEM_REF, pointer_sized_int_node,
4824 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4825 idx * sz));
4826 tree v = create_tmp_var (pointer_sized_int_node);
4827 gimple *g = gimple_build_assign (v, r);
4828 gimple_seq_add_stmt (ilist, g);
4829 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4831 v = create_tmp_var (type);
4832 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4833 gimple_seq_add_stmt (ilist, g);
4835 return v;
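   /* Sketch of what a call task_reduction_read (ilist, t, T, 2) emits,
      with t of pointer type and idx == 2:

	v = MEM <uintptr_t> [t + 2 * sizeof (uintptr_t)];
	v2 = (T) v;   only when T is not uintptr_t-compatible

      and returns v (or v2).  */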
4838 /* Lower early initialization of privatized variable NEW_VAR
4839 if it needs an allocator (has an allocate clause). */
4841 static bool
4842 lower_private_allocate (tree var, tree new_var, tree &allocator,
4843 tree &allocate_ptr, gimple_seq *ilist,
4844 omp_context *ctx, bool is_ref, tree size)
4846 if (allocator)
4847 return false;
4848 gcc_assert (allocate_ptr == NULL_TREE);
4849 if (ctx->allocate_map
4850 && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
4851 if (tree *allocatorp = ctx->allocate_map->get (var))
4852 allocator = *allocatorp;
4853 if (allocator == NULL_TREE)
4854 return false;
4855 if (!is_ref && omp_privatize_by_reference (var))
4857 allocator = NULL_TREE;
4858 return false;
4861 unsigned HOST_WIDE_INT ialign = 0;
4862 if (TREE_CODE (allocator) == TREE_LIST)
4864 ialign = tree_to_uhwi (TREE_VALUE (allocator));
4865 allocator = TREE_PURPOSE (allocator);
4867 if (TREE_CODE (allocator) != INTEGER_CST)
4868 allocator = build_outer_var_ref (allocator, ctx, OMP_CLAUSE_ALLOCATE);
4869 allocator = fold_convert (pointer_sized_int_node, allocator);
4870 if (TREE_CODE (allocator) != INTEGER_CST)
4872 tree var = create_tmp_var (TREE_TYPE (allocator));
4873 gimplify_assign (var, allocator, ilist);
4874 allocator = var;
4877 tree ptr_type, align, sz = size;
4878 if (TYPE_P (new_var))
4880 ptr_type = build_pointer_type (new_var);
4881 ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
4883 else if (is_ref)
4885 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4886 ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4888 else
4890 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4891 ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
4892 if (sz == NULL_TREE)
4893 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
4895 align = build_int_cst (size_type_node, ialign);
4896 if (TREE_CODE (sz) != INTEGER_CST)
4898 tree szvar = create_tmp_var (size_type_node);
4899 gimplify_assign (szvar, sz, ilist);
4900 sz = szvar;
4902 allocate_ptr = create_tmp_var (ptr_type);
4903 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4904 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4905 gimple_call_set_lhs (g, allocate_ptr);
4906 gimple_seq_add_stmt (ilist, g);
4907 if (!is_ref)
4909 tree x = build_simple_mem_ref (allocate_ptr);
4910 TREE_THIS_NOTRAP (x) = 1;
4911 SET_DECL_VALUE_EXPR (new_var, x);
4912 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4914 return true;
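   /* Sketch of the code emitted for e.g. private(x) allocate(h : x),
      with x not privatized by reference:

	allocate_ptr = GOMP_alloc (__alignof__ (x), sizeof (x), h);

      after which x's DECL_VALUE_EXPR is *allocate_ptr, so every later
      use of the privatized x goes through the allocated storage.  */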
4917 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4918 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4919 private variables. Initialization statements go in ILIST, while calls
4920 to destructors go in DLIST. */
4922 static void
4923 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4924 omp_context *ctx, struct omp_for_data *fd)
4926 tree c, copyin_seq, x, ptr;
4927 bool copyin_by_ref = false;
4928 bool lastprivate_firstprivate = false;
4929 bool reduction_omp_orig_ref = false;
4930 int pass;
4931 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4932 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4933 omplow_simd_context sctx = omplow_simd_context ();
4934 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4935 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4936 gimple_seq llist[4] = { };
4937 tree nonconst_simd_if = NULL_TREE;
4939 copyin_seq = NULL;
4940 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4942 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4943 with data sharing clauses referencing variable sized vars. That
4944 is unnecessarily hard to support and very unlikely to result in
4945 vectorized code anyway. */
4946 if (is_simd)
4947 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4948 switch (OMP_CLAUSE_CODE (c))
4950 case OMP_CLAUSE_LINEAR:
4951 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4952 sctx.max_vf = 1;
4953 /* FALLTHRU */
4954 case OMP_CLAUSE_PRIVATE:
4955 case OMP_CLAUSE_FIRSTPRIVATE:
4956 case OMP_CLAUSE_LASTPRIVATE:
4957 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4958 sctx.max_vf = 1;
4959 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4961 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4962 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4963 sctx.max_vf = 1;
4965 break;
4966 case OMP_CLAUSE_REDUCTION:
4967 case OMP_CLAUSE_IN_REDUCTION:
4968 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4969 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4970 sctx.max_vf = 1;
4971 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4973 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4974 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4975 sctx.max_vf = 1;
4977 break;
4978 case OMP_CLAUSE_IF:
4979 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4980 sctx.max_vf = 1;
4981 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4982 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4983 break;
4984 case OMP_CLAUSE_SIMDLEN:
4985 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4986 sctx.max_vf = 1;
4987 break;
4988 case OMP_CLAUSE__CONDTEMP_:
4989 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4990 if (sctx.is_simt)
4991 sctx.max_vf = 1;
4992 break;
4993 default:
4994 continue;
4997 /* Add a placeholder for simduid. */
4998 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4999 sctx.simt_eargs.safe_push (NULL_TREE);
5001 unsigned task_reduction_cnt = 0;
5002 unsigned task_reduction_cntorig = 0;
5003 unsigned task_reduction_cnt_full = 0;
5004 unsigned task_reduction_cntorig_full = 0;
5005 unsigned task_reduction_other_cnt = 0;
5006 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
5007 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
5008 /* Do all the fixed-sized types in the first pass, and the variable-sized
5009 types in the second pass. This makes sure that the scalar arguments to
5010 the variable-sized types are processed before we use them in the
5011 variable-sized operations. For task reductions we use 4 passes: in the
5012 first two we ignore them, in the third we gather arguments for the
5013 GOMP_task_reduction_remap call, and in the last pass we actually handle
5014 the task reductions. */
5015 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
5016 ? 4 : 2); ++pass)
5018 if (pass == 2 && task_reduction_cnt)
5020 tskred_atype
5021 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
5022 + task_reduction_cntorig);
5023 tskred_avar = create_tmp_var_raw (tskred_atype);
5024 gimple_add_tmp_var (tskred_avar);
5025 TREE_ADDRESSABLE (tskred_avar) = 1;
5026 task_reduction_cnt_full = task_reduction_cnt;
5027 task_reduction_cntorig_full = task_reduction_cntorig;
5029 else if (pass == 3 && task_reduction_cnt)
5031 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
5032 gimple *g
5033 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
5034 size_int (task_reduction_cntorig),
5035 build_fold_addr_expr (tskred_avar));
5036 gimple_seq_add_stmt (ilist, g);
5038 if (pass == 3 && task_reduction_other_cnt)
5040 /* For reduction clauses, build
5041 tskred_base = (void *) tskred_temp[2]
5042 + omp_get_thread_num () * tskred_temp[1]
5043 or if tskred_temp[1] is known to be constant, that constant
5044 directly. This is the start of the private reduction copy block
5045 for the current thread. */
5046 tree v = create_tmp_var (integer_type_node);
5047 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
5048 gimple *g = gimple_build_call (x, 0);
5049 gimple_call_set_lhs (g, v);
5050 gimple_seq_add_stmt (ilist, g);
5051 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
5052 tskred_temp = OMP_CLAUSE_DECL (c);
5053 if (is_taskreg_ctx (ctx))
5054 tskred_temp = lookup_decl (tskred_temp, ctx);
5055 tree v2 = create_tmp_var (sizetype);
5056 g = gimple_build_assign (v2, NOP_EXPR, v);
5057 gimple_seq_add_stmt (ilist, g);
5058 if (ctx->task_reductions[0])
5059 v = fold_convert (sizetype, ctx->task_reductions[0]);
5060 else
5061 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
5062 tree v3 = create_tmp_var (sizetype);
5063 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
5064 gimple_seq_add_stmt (ilist, g);
5065 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
5066 tskred_base = create_tmp_var (ptr_type_node);
5067 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
5068 gimple_seq_add_stmt (ilist, g);
5070 task_reduction_cnt = 0;
5071 task_reduction_cntorig = 0;
5072 task_reduction_other_cnt = 0;
5073 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5075 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
5076 tree var, new_var;
5077 bool by_ref;
5078 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5079 bool task_reduction_p = false;
5080 bool task_reduction_needs_orig_p = false;
5081 tree cond = NULL_TREE;
5082 tree allocator, allocate_ptr;
5084 switch (c_kind)
5086 case OMP_CLAUSE_PRIVATE:
5087 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
5088 continue;
5089 break;
5090 case OMP_CLAUSE_SHARED:
5091 /* Ignore shared directives in teams construct inside
5092 of target construct. */
5093 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5094 && !is_host_teams_ctx (ctx))
5095 continue;
5096 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
5098 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
5099 || is_global_var (OMP_CLAUSE_DECL (c)));
5100 continue;
5102 case OMP_CLAUSE_FIRSTPRIVATE:
5103 case OMP_CLAUSE_COPYIN:
5104 break;
5105 case OMP_CLAUSE_LINEAR:
5106 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
5107 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5108 lastprivate_firstprivate = true;
5109 break;
5110 case OMP_CLAUSE_REDUCTION:
5111 case OMP_CLAUSE_IN_REDUCTION:
5112 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5113 || is_task_ctx (ctx)
5114 || OMP_CLAUSE_REDUCTION_TASK (c))
5116 task_reduction_p = true;
5117 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5119 task_reduction_other_cnt++;
5120 if (pass == 2)
5121 continue;
5123 else
5124 task_reduction_cnt++;
5125 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5127 var = OMP_CLAUSE_DECL (c);
5128 /* If var is a global variable that isn't privatized
5129 in outer contexts, we don't need to look up the
5130 original address; it is always the address of the
5131 global variable itself. */
5132 if (!DECL_P (var)
5133 || omp_privatize_by_reference (var)
5134 || !is_global_var
5135 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5137 task_reduction_needs_orig_p = true;
5138 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5139 task_reduction_cntorig++;
5143 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5144 reduction_omp_orig_ref = true;
5145 break;
5146 case OMP_CLAUSE__REDUCTEMP_:
5147 if (!is_taskreg_ctx (ctx))
5148 continue;
5149 /* FALLTHRU */
5150 case OMP_CLAUSE__LOOPTEMP_:
5151 /* Handle _looptemp_/_reductemp_ clauses only on
5152 parallel/task. */
5153 if (fd)
5154 continue;
5155 break;
5156 case OMP_CLAUSE_LASTPRIVATE:
5157 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5159 lastprivate_firstprivate = true;
5160 if (pass != 0 || is_taskloop_ctx (ctx))
5161 continue;
5163 /* Even without corresponding firstprivate, if
5164 decl is Fortran allocatable, it needs outer var
5165 reference. */
5166 else if (pass == 0
5167 && lang_hooks.decls.omp_private_outer_ref
5168 (OMP_CLAUSE_DECL (c)))
5169 lastprivate_firstprivate = true;
5170 break;
5171 case OMP_CLAUSE_ALIGNED:
5172 if (pass != 1)
5173 continue;
5174 var = OMP_CLAUSE_DECL (c);
5175 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5176 && !is_global_var (var))
5178 new_var = maybe_lookup_decl (var, ctx);
5179 if (new_var == NULL_TREE)
5180 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
5181 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5182 tree alarg = omp_clause_aligned_alignment (c);
5183 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5184 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5185 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5186 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5187 gimplify_and_add (x, ilist);
5189 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5190 && is_global_var (var))
5192 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5193 new_var = lookup_decl (var, ctx);
5194 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
5195 t = build_fold_addr_expr_loc (clause_loc, t);
5196 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5197 tree alarg = omp_clause_aligned_alignment (c);
5198 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5199 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5200 t = fold_convert_loc (clause_loc, ptype, t);
5201 x = create_tmp_var (ptype);
5202 t = build2 (MODIFY_EXPR, ptype, x, t);
5203 gimplify_and_add (t, ilist);
5204 t = build_simple_mem_ref_loc (clause_loc, x);
5205 SET_DECL_VALUE_EXPR (new_var, t);
5206 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5208 continue;
5209 case OMP_CLAUSE__CONDTEMP_:
5210 if (is_parallel_ctx (ctx)
5211 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5212 break;
5213 continue;
5214 default:
5215 continue;
5218 if (task_reduction_p != (pass >= 2))
5219 continue;
5221 allocator = NULL_TREE;
5222 allocate_ptr = NULL_TREE;
5223 new_var = var = OMP_CLAUSE_DECL (c);
5224 if ((c_kind == OMP_CLAUSE_REDUCTION
5225 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5226 && TREE_CODE (var) == MEM_REF)
5228 var = TREE_OPERAND (var, 0);
5229 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5230 var = TREE_OPERAND (var, 0);
5231 if (TREE_CODE (var) == INDIRECT_REF
5232 || TREE_CODE (var) == ADDR_EXPR)
5233 var = TREE_OPERAND (var, 0);
5234 if (is_variable_sized (var))
5236 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5237 var = DECL_VALUE_EXPR (var);
5238 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5239 var = TREE_OPERAND (var, 0);
5240 gcc_assert (DECL_P (var));
5242 new_var = var;
5244 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5246 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5247 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5249 else if (c_kind != OMP_CLAUSE_COPYIN)
5250 new_var = lookup_decl (var, ctx);
5252 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5254 if (pass != 0)
5255 continue;
5257 /* C/C++ array section reductions. */
5258 else if ((c_kind == OMP_CLAUSE_REDUCTION
5259 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5260 && var != OMP_CLAUSE_DECL (c))
5262 if (pass == 0)
5263 continue;
5265 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5266 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5268 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5270 tree b = TREE_OPERAND (orig_var, 1);
5271 if (is_omp_target (ctx->stmt))
5272 b = NULL_TREE;
5273 else
5274 b = maybe_lookup_decl (b, ctx);
5275 if (b == NULL)
5277 b = TREE_OPERAND (orig_var, 1);
5278 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5280 if (integer_zerop (bias))
5281 bias = b;
5282 else
5284 bias = fold_convert_loc (clause_loc,
5285 TREE_TYPE (b), bias);
5286 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5287 TREE_TYPE (b), b, bias);
5289 orig_var = TREE_OPERAND (orig_var, 0);
5291 if (pass == 2)
5293 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5294 if (is_global_var (out)
5295 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5296 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5297 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5298 != POINTER_TYPE)))
5299 x = var;
5300 else if (is_omp_target (ctx->stmt))
5301 x = out;
5302 else
5304 bool by_ref = use_pointer_for_field (var, NULL);
5305 x = build_receiver_ref (var, by_ref, ctx);
5306 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5307 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5308 == POINTER_TYPE))
5309 x = build_fold_addr_expr (x);
5311 if (TREE_CODE (orig_var) == INDIRECT_REF)
5312 x = build_simple_mem_ref (x);
5313 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5315 if (var == TREE_OPERAND (orig_var, 0))
5316 x = build_fold_addr_expr (x);
5318 bias = fold_convert (sizetype, bias);
5319 x = fold_convert (ptr_type_node, x);
5320 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5321 TREE_TYPE (x), x, bias);
5322 unsigned cnt = task_reduction_cnt - 1;
5323 if (!task_reduction_needs_orig_p)
5324 cnt += (task_reduction_cntorig_full
5325 - task_reduction_cntorig);
5326 else
5327 cnt = task_reduction_cntorig - 1;
5328 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5329 size_int (cnt), NULL_TREE, NULL_TREE);
5330 gimplify_assign (r, x, ilist);
5331 continue;
5334 if (TREE_CODE (orig_var) == INDIRECT_REF
5335 || TREE_CODE (orig_var) == ADDR_EXPR)
5336 orig_var = TREE_OPERAND (orig_var, 0);
5337 tree d = OMP_CLAUSE_DECL (c);
5338 tree type = TREE_TYPE (d);
5339 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5340 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5341 tree sz = v;
5342 const char *name = get_name (orig_var);
5343 if (pass != 3 && !TREE_CONSTANT (v))
5345 tree t;
5346 if (is_omp_target (ctx->stmt))
5347 t = NULL_TREE;
5348 else
5349 t = maybe_lookup_decl (v, ctx);
5350 if (t)
5351 v = t;
5352 else
5353 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5354 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5355 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5356 TREE_TYPE (v), v,
5357 build_int_cst (TREE_TYPE (v), 1));
5358 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5359 TREE_TYPE (v), t,
5360 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5362 if (pass == 3)
5364 tree xv = create_tmp_var (ptr_type_node);
5365 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5367 unsigned cnt = task_reduction_cnt - 1;
5368 if (!task_reduction_needs_orig_p)
5369 cnt += (task_reduction_cntorig_full
5370 - task_reduction_cntorig);
5371 else
5372 cnt = task_reduction_cntorig - 1;
5373 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5374 size_int (cnt), NULL_TREE, NULL_TREE);
5376 gimple *g = gimple_build_assign (xv, x);
5377 gimple_seq_add_stmt (ilist, g);
5379 else
5381 unsigned int idx = *ctx->task_reduction_map->get (c);
5382 tree off;
5383 if (ctx->task_reductions[1 + idx])
5384 off = fold_convert (sizetype,
5385 ctx->task_reductions[1 + idx]);
5386 else
5387 off = task_reduction_read (ilist, tskred_temp, sizetype,
5388 7 + 3 * idx + 1);
5389 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5390 tskred_base, off);
5391 gimple_seq_add_stmt (ilist, g);
5393 x = fold_convert (build_pointer_type (boolean_type_node),
5394 xv);
5395 if (TREE_CONSTANT (v))
5396 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5397 TYPE_SIZE_UNIT (type));
5398 else
5400 tree t;
5401 if (is_omp_target (ctx->stmt))
5402 t = NULL_TREE;
5403 else
5404 t = maybe_lookup_decl (v, ctx);
5405 if (t)
5406 v = t;
5407 else
5408 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5409 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5410 fb_rvalue);
5411 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5412 TREE_TYPE (v), v,
5413 build_int_cst (TREE_TYPE (v), 1));
5414 t = fold_build2_loc (clause_loc, MULT_EXPR,
5415 TREE_TYPE (v), t,
5416 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5417 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5419 cond = create_tmp_var (TREE_TYPE (x));
5420 gimplify_assign (cond, x, ilist);
5421 x = xv;
5423 else if (lower_private_allocate (var, type, allocator,
5424 allocate_ptr, ilist, ctx,
5425 true,
5426 TREE_CONSTANT (v)
5427 ? TYPE_SIZE_UNIT (type)
5428 : sz))
5429 x = allocate_ptr;
5430 else if (TREE_CONSTANT (v))
5432 x = create_tmp_var_raw (type, name);
5433 gimple_add_tmp_var (x);
5434 TREE_ADDRESSABLE (x) = 1;
5435 x = build_fold_addr_expr_loc (clause_loc, x);
5437 else
5439 tree atmp
5440 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5441 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5442 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5445 tree ptype = build_pointer_type (TREE_TYPE (type));
5446 x = fold_convert_loc (clause_loc, ptype, x);
5447 tree y = create_tmp_var (ptype, name);
5448 gimplify_assign (y, x, ilist);
5449 x = y;
5450 tree yb = y;
5452 if (!integer_zerop (bias))
5454 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5455 bias);
5456 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5458 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5459 pointer_sized_int_node, yb, bias);
5460 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5461 yb = create_tmp_var (ptype, name);
5462 gimplify_assign (yb, x, ilist);
5463 x = yb;
5466 d = TREE_OPERAND (d, 0);
5467 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5468 d = TREE_OPERAND (d, 0);
5469 if (TREE_CODE (d) == ADDR_EXPR)
5471 if (orig_var != var)
5473 gcc_assert (is_variable_sized (orig_var));
5474 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5476 gimplify_assign (new_var, x, ilist);
5477 tree new_orig_var = lookup_decl (orig_var, ctx);
5478 tree t = build_fold_indirect_ref (new_var);
5479 DECL_IGNORED_P (new_var) = 0;
5480 TREE_THIS_NOTRAP (t) = 1;
5481 SET_DECL_VALUE_EXPR (new_orig_var, t);
5482 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5484 else
5486 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5487 build_int_cst (ptype, 0));
5488 SET_DECL_VALUE_EXPR (new_var, x);
5489 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5492 else
5494 gcc_assert (orig_var == var);
5495 if (TREE_CODE (d) == INDIRECT_REF)
5497 x = create_tmp_var (ptype, name);
5498 TREE_ADDRESSABLE (x) = 1;
5499 gimplify_assign (x, yb, ilist);
5500 x = build_fold_addr_expr_loc (clause_loc, x);
5502 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5503 gimplify_assign (new_var, x, ilist);
5505 /* GOMP_taskgroup_reduction_register memsets the whole
5506 array to zero. If the initializer is zero, we don't
5507 need to initialize it again; just mark it as ever
5508 used unconditionally, i.e. cond = true. */
5509 if (cond
5510 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5511 && initializer_zerop (omp_reduction_init (c,
5512 TREE_TYPE (type))))
5514 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5515 boolean_true_node);
5516 gimple_seq_add_stmt (ilist, g);
5517 continue;
5519 tree end = create_artificial_label (UNKNOWN_LOCATION);
5520 if (cond)
5522 gimple *g;
5523 if (!is_parallel_ctx (ctx))
5525 tree condv = create_tmp_var (boolean_type_node);
5526 g = gimple_build_assign (condv,
5527 build_simple_mem_ref (cond));
5528 gimple_seq_add_stmt (ilist, g);
5529 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5530 g = gimple_build_cond (NE_EXPR, condv,
5531 boolean_false_node, end, lab1);
5532 gimple_seq_add_stmt (ilist, g);
5533 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5535 g = gimple_build_assign (build_simple_mem_ref (cond),
5536 boolean_true_node);
5537 gimple_seq_add_stmt (ilist, g);
5540 tree y1 = create_tmp_var (ptype);
5541 gimplify_assign (y1, y, ilist);
5542 tree i2 = NULL_TREE, y2 = NULL_TREE;
5543 tree body2 = NULL_TREE, end2 = NULL_TREE;
5544 tree y3 = NULL_TREE, y4 = NULL_TREE;
5545 if (task_reduction_needs_orig_p)
5547 y3 = create_tmp_var (ptype);
5548 tree ref;
5549 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5550 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5551 size_int (task_reduction_cnt_full
5552 + task_reduction_cntorig - 1),
5553 NULL_TREE, NULL_TREE);
5554 else
5556 unsigned int idx = *ctx->task_reduction_map->get (c);
5557 ref = task_reduction_read (ilist, tskred_temp, ptype,
5558 7 + 3 * idx);
5560 gimplify_assign (y3, ref, ilist);
5562 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5564 if (pass != 3)
5566 y2 = create_tmp_var (ptype);
5567 gimplify_assign (y2, y, ilist);
5569 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5571 tree ref = build_outer_var_ref (var, ctx);
5572 /* For references, build_outer_var_ref already performs this. */
5573 if (TREE_CODE (d) == INDIRECT_REF)
5574 gcc_assert (omp_privatize_by_reference (var));
5575 else if (TREE_CODE (d) == ADDR_EXPR)
5576 ref = build_fold_addr_expr (ref);
5577 else if (omp_privatize_by_reference (var))
5578 ref = build_fold_addr_expr (ref);
5579 ref = fold_convert_loc (clause_loc, ptype, ref);
5580 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5581 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5583 y3 = create_tmp_var (ptype);
5584 gimplify_assign (y3, unshare_expr (ref), ilist);
5586 if (is_simd)
5588 y4 = create_tmp_var (ptype);
5589 gimplify_assign (y4, ref, dlist);
5593 tree i = create_tmp_var (TREE_TYPE (v));
5594 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5595 tree body = create_artificial_label (UNKNOWN_LOCATION);
5596 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5597 if (y2)
5599 i2 = create_tmp_var (TREE_TYPE (v));
5600 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5601 body2 = create_artificial_label (UNKNOWN_LOCATION);
5602 end2 = create_artificial_label (UNKNOWN_LOCATION);
5603 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5605 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5607 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5608 tree decl_placeholder
5609 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5610 SET_DECL_VALUE_EXPR (decl_placeholder,
5611 build_simple_mem_ref (y1));
5612 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5613 SET_DECL_VALUE_EXPR (placeholder,
5614 y3 ? build_simple_mem_ref (y3)
5615 : error_mark_node);
5616 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5617 x = lang_hooks.decls.omp_clause_default_ctor
5618 (c, build_simple_mem_ref (y1),
5619 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5620 if (x)
5621 gimplify_and_add (x, ilist);
5622 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5624 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5625 lower_omp (&tseq, ctx);
5626 gimple_seq_add_seq (ilist, tseq);
5628 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5629 if (is_simd)
5631 SET_DECL_VALUE_EXPR (decl_placeholder,
5632 build_simple_mem_ref (y2));
5633 SET_DECL_VALUE_EXPR (placeholder,
5634 build_simple_mem_ref (y4));
5635 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5636 lower_omp (&tseq, ctx);
5637 gimple_seq_add_seq (dlist, tseq);
5638 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5640 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5641 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5642 if (y2)
5644 x = lang_hooks.decls.omp_clause_dtor
5645 (c, build_simple_mem_ref (y2));
5646 if (x)
5647 gimplify_and_add (x, dlist);
5650 else
5652 x = omp_reduction_init (c, TREE_TYPE (type));
5653 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5655 /* reduction(-:var) sums up the partial results, so it
5656 acts identically to reduction(+:var). */
5657 if (code == MINUS_EXPR)
5658 code = PLUS_EXPR;
5660 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5661 if (is_simd)
5663 x = build2 (code, TREE_TYPE (type),
5664 build_simple_mem_ref (y4),
5665 build_simple_mem_ref (y2));
5666 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5669 gimple *g
5670 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5671 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5672 gimple_seq_add_stmt (ilist, g);
5673 if (y3)
5675 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5676 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5677 gimple_seq_add_stmt (ilist, g);
5679 g = gimple_build_assign (i, PLUS_EXPR, i,
5680 build_int_cst (TREE_TYPE (i), 1));
5681 gimple_seq_add_stmt (ilist, g);
5682 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5683 gimple_seq_add_stmt (ilist, g);
5684 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5685 if (y2)
5687 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5688 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5689 gimple_seq_add_stmt (dlist, g);
5690 if (y4)
5692 g = gimple_build_assign
5693 (y4, POINTER_PLUS_EXPR, y4,
5694 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5695 gimple_seq_add_stmt (dlist, g);
5697 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5698 build_int_cst (TREE_TYPE (i2), 1));
5699 gimple_seq_add_stmt (dlist, g);
5700 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5701 gimple_seq_add_stmt (dlist, g);
5702 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
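/* A note on the block below (a description, not part of the original
   sources): if the privatized storage came from an OpenMP allocator,
   release it at the end of the region via GOMP_free (allocate_ptr,
   allocator), appended to the destructor sequence DLIST. */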
5704 if (allocator)
5706 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5707 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5708 gimple_seq_add_stmt (dlist, g);
5710 continue;
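/* Pass 2 (sketch of what follows): store the address of the original
   variable into its slot of the task reduction address array
   (tskred_avar); which slot is used depends on whether original-variable
   references are needed by the reduction callbacks. */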
5712 else if (pass == 2)
5714 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5715 if (is_global_var (out))
5716 x = var;
5717 else if (is_omp_target (ctx->stmt))
5718 x = out;
5719 else
5721 bool by_ref = use_pointer_for_field (var, ctx);
5722 x = build_receiver_ref (var, by_ref, ctx);
5724 if (!omp_privatize_by_reference (var))
5725 x = build_fold_addr_expr (x);
5726 x = fold_convert (ptr_type_node, x);
5727 unsigned cnt = task_reduction_cnt - 1;
5728 if (!task_reduction_needs_orig_p)
5729 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5730 else
5731 cnt = task_reduction_cntorig - 1;
5732 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5733 size_int (cnt), NULL_TREE, NULL_TREE);
5734 gimplify_assign (r, x, ilist);
5735 continue;
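/* Pass 3 (sketch of what follows): point NEW_VAR at the privatized copy
   inside the task reduction data, either read from tskred_avar or
   computed as tskred_base + offset, and set COND to the address of the
   "ever used" flag stored right after the privatized data. */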
5737 else if (pass == 3)
5739 tree type = TREE_TYPE (new_var);
5740 if (!omp_privatize_by_reference (var))
5741 type = build_pointer_type (type);
5742 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5744 unsigned cnt = task_reduction_cnt - 1;
5745 if (!task_reduction_needs_orig_p)
5746 cnt += (task_reduction_cntorig_full
5747 - task_reduction_cntorig);
5748 else
5749 cnt = task_reduction_cntorig - 1;
5750 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5751 size_int (cnt), NULL_TREE, NULL_TREE);
5753 else
5755 unsigned int idx = *ctx->task_reduction_map->get (c);
5756 tree off;
5757 if (ctx->task_reductions[1 + idx])
5758 off = fold_convert (sizetype,
5759 ctx->task_reductions[1 + idx]);
5760 else
5761 off = task_reduction_read (ilist, tskred_temp, sizetype,
5762 7 + 3 * idx + 1);
5763 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5764 tskred_base, off);
5766 x = fold_convert (type, x);
5767 tree t;
5768 if (omp_privatize_by_reference (var))
5770 gimplify_assign (new_var, x, ilist);
5771 t = new_var;
5772 new_var = build_simple_mem_ref (new_var);
5774 else
5776 t = create_tmp_var (type);
5777 gimplify_assign (t, x, ilist);
5778 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5779 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5781 t = fold_convert (build_pointer_type (boolean_type_node), t);
5782 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5783 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5784 cond = create_tmp_var (TREE_TYPE (t));
5785 gimplify_assign (cond, t, ilist);
5787 else if (is_variable_sized (var))
5789 /* For variable sized types, we need to allocate the
5790 actual storage here. Call alloca and store the
5791 result in the pointer decl that we created elsewhere. */
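/* A sketch of what is emitted below when no OpenMP allocator is in use
   (names are illustrative):
     void *tmp = __builtin_alloca_with_align (size, align);
     ptr = (T *) tmp;
   where size is TYPE_SIZE_UNIT of the type and align is DECL_ALIGN (var). */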
5792 if (pass == 0)
5793 continue;
5795 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5797 tree tmp;
5799 ptr = DECL_VALUE_EXPR (new_var);
5800 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5801 ptr = TREE_OPERAND (ptr, 0);
5802 gcc_assert (DECL_P (ptr));
5803 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5805 if (lower_private_allocate (var, new_var, allocator,
5806 allocate_ptr, ilist, ctx,
5807 false, x))
5808 tmp = allocate_ptr;
5809 else
5811 /* void *tmp = __builtin_alloca_with_align (x, DECL_ALIGN (var)); */
5812 tree atmp
5813 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5814 gcall *stmt
5815 = gimple_build_call (atmp, 2, x,
5816 size_int (DECL_ALIGN (var)));
5817 cfun->calls_alloca = 1;
5818 tmp = create_tmp_var_raw (ptr_type_node);
5819 gimple_add_tmp_var (tmp);
5820 gimple_call_set_lhs (stmt, tmp);
5822 gimple_seq_add_stmt (ilist, stmt);
5825 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5826 gimplify_assign (ptr, x, ilist);
5829 else if (omp_privatize_by_reference (var)
5830 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5831 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5833 /* For references that are being privatized for Fortran,
5834 allocate new backing storage for the new pointer
5835 variable. This allows us to avoid changing all the
5836 code that expects a pointer to something that expects
5837 a direct variable. */
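/* Sketch of the intent (illustrative names): for a privatized 'T &var'
   the lowered code keeps going through a pointer, so emit
     T tmp;            // fresh backing storage
     new_var = &tmp;   // pointer the privatized references dereference  */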
5838 if (pass == 0)
5839 continue;
5841 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5842 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5844 x = build_receiver_ref (var, false, ctx);
5845 if (ctx->allocate_map)
5846 if (tree *allocatep = ctx->allocate_map->get (var))
5848 allocator = *allocatep;
5849 if (TREE_CODE (allocator) == TREE_LIST)
5850 allocator = TREE_PURPOSE (allocator);
5851 if (TREE_CODE (allocator) != INTEGER_CST)
5852 allocator = build_outer_var_ref (allocator, ctx);
5853 allocator = fold_convert (pointer_sized_int_node,
5854 allocator);
5855 allocate_ptr = unshare_expr (x);
5857 if (allocator == NULL_TREE)
5858 x = build_fold_addr_expr_loc (clause_loc, x);
5860 else if (lower_private_allocate (var, new_var, allocator,
5861 allocate_ptr,
5862 ilist, ctx, true, x))
5863 x = allocate_ptr;
5864 else if (TREE_CONSTANT (x))
5866 /* For a reduction in a SIMD loop, defer adding the
5867 initialization of the reference, because if we decide
5868 to use a SIMD array for it, the initialization could cause
5869 an expansion ICE. Ditto for other privatization clauses. */
5870 if (is_simd)
5871 x = NULL_TREE;
5872 else
5874 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5875 get_name (var));
5876 gimple_add_tmp_var (x);
5877 TREE_ADDRESSABLE (x) = 1;
5878 x = build_fold_addr_expr_loc (clause_loc, x);
5881 else
5883 tree atmp
5884 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5885 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5886 tree al = size_int (TYPE_ALIGN (rtype));
5887 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5890 if (x)
5892 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5893 gimplify_assign (new_var, x, ilist);
5896 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5898 else if ((c_kind == OMP_CLAUSE_REDUCTION
5899 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5900 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5902 if (pass == 0)
5903 continue;
5905 else if (pass != 0)
5906 continue;
5908 switch (OMP_CLAUSE_CODE (c))
5910 case OMP_CLAUSE_SHARED:
5911 /* Ignore shared directives in teams construct inside
5912 target construct. */
5913 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5914 && !is_host_teams_ctx (ctx))
5915 continue;
5916 /* Shared global vars are just accessed directly. */
5917 if (is_global_var (new_var))
5918 break;
5919 /* For taskloop firstprivate/lastprivate, represented
5920 as firstprivate and shared clause on the task, new_var
5921 is the firstprivate var. */
5922 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5923 break;
5924 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5925 needs to be delayed until after fixup_child_record_type so
5926 that we get the correct type during the dereference. */
5927 by_ref = use_pointer_for_field (var, ctx);
5928 x = build_receiver_ref (var, by_ref, ctx);
5929 SET_DECL_VALUE_EXPR (new_var, x);
5930 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5932 /* ??? If VAR is not passed by reference, and the variable
5933 hasn't been initialized yet, then we'll get a warning for
5934 the store into the omp_data_s structure. Ideally, we'd be
5935 able to notice this and not store anything at all, but
5936 we're generating code too early. Suppress the warning. */
5937 if (!by_ref)
5938 suppress_warning (var, OPT_Wuninitialized);
5939 break;
5941 case OMP_CLAUSE__CONDTEMP_:
5942 if (is_parallel_ctx (ctx))
5944 x = build_receiver_ref (var, false, ctx);
5945 SET_DECL_VALUE_EXPR (new_var, x);
5946 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5948 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5950 x = build_zero_cst (TREE_TYPE (var));
5951 goto do_private;
5953 break;
5955 case OMP_CLAUSE_LASTPRIVATE:
5956 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5957 break;
5958 /* FALLTHRU */
5960 case OMP_CLAUSE_PRIVATE:
5961 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5962 x = build_outer_var_ref (var, ctx);
5963 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5965 if (is_task_ctx (ctx))
5966 x = build_receiver_ref (var, false, ctx);
5967 else
5968 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5970 else
5971 x = NULL;
5972 do_private:
5973 tree nx;
5974 bool copy_ctor;
5975 copy_ctor = false;
5976 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5977 ilist, ctx, false, NULL_TREE);
5978 nx = unshare_expr (new_var);
5979 if (is_simd
5980 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5981 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5982 copy_ctor = true;
5983 if (copy_ctor)
5984 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5985 else
5986 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5987 if (is_simd)
5989 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5990 if ((TREE_ADDRESSABLE (new_var) || nx || y
5991 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5992 && (gimple_omp_for_collapse (ctx->stmt) != 1
5993 || (gimple_omp_for_index (ctx->stmt, 0)
5994 != new_var)))
5995 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5996 || omp_privatize_by_reference (var))
5997 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5998 ivar, lvar))
6000 if (omp_privatize_by_reference (var))
6002 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6003 tree new_vard = TREE_OPERAND (new_var, 0);
6004 gcc_assert (DECL_P (new_vard));
6005 SET_DECL_VALUE_EXPR (new_vard,
6006 build_fold_addr_expr (lvar));
6007 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6010 if (nx)
6012 tree iv = unshare_expr (ivar);
6013 if (copy_ctor)
6014 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
6016 else
6017 x = lang_hooks.decls.omp_clause_default_ctor (c,
6021 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
6023 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
6024 unshare_expr (ivar), x);
6025 nx = x;
6027 if (nx && x)
6028 gimplify_and_add (x, &llist[0]);
6029 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6030 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6032 tree v = new_var;
6033 if (!DECL_P (v))
6035 gcc_assert (TREE_CODE (v) == MEM_REF);
6036 v = TREE_OPERAND (v, 0);
6037 gcc_assert (DECL_P (v));
6039 v = *ctx->lastprivate_conditional_map->get (v);
6040 tree t = create_tmp_var (TREE_TYPE (v));
6041 tree z = build_zero_cst (TREE_TYPE (v));
6042 tree orig_v
6043 = build_outer_var_ref (var, ctx,
6044 OMP_CLAUSE_LASTPRIVATE);
6045 gimple_seq_add_stmt (dlist,
6046 gimple_build_assign (t, z));
6047 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
6048 tree civar = DECL_VALUE_EXPR (v);
6049 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
6050 civar = unshare_expr (civar);
6051 TREE_OPERAND (civar, 1) = sctx.idx;
6052 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
6053 unshare_expr (civar));
6054 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
6055 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
6056 orig_v, unshare_expr (ivar)));
6057 tree cond = build2 (LT_EXPR, boolean_type_node, t,
6058 civar);
6059 x = build3 (COND_EXPR, void_type_node, cond, x,
6060 void_node);
6061 gimple_seq tseq = NULL;
6062 gimplify_and_add (x, &tseq);
6063 if (ctx->outer)
6064 lower_omp (&tseq, ctx->outer);
6065 gimple_seq_add_seq (&llist[1], tseq);
6067 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6068 && ctx->for_simd_scan_phase)
6070 x = unshare_expr (ivar);
6071 tree orig_v
6072 = build_outer_var_ref (var, ctx,
6073 OMP_CLAUSE_LASTPRIVATE);
6074 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6075 orig_v);
6076 gimplify_and_add (x, &llist[0]);
6078 if (y)
6080 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
6081 if (y)
6082 gimplify_and_add (y, &llist[1]);
6084 break;
6086 if (omp_privatize_by_reference (var))
6088 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6089 tree new_vard = TREE_OPERAND (new_var, 0);
6090 gcc_assert (DECL_P (new_vard));
6091 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6092 x = TYPE_SIZE_UNIT (type);
6093 if (TREE_CONSTANT (x))
6095 x = create_tmp_var_raw (type, get_name (var));
6096 gimple_add_tmp_var (x);
6097 TREE_ADDRESSABLE (x) = 1;
6098 x = build_fold_addr_expr_loc (clause_loc, x);
6099 x = fold_convert_loc (clause_loc,
6100 TREE_TYPE (new_vard), x);
6101 gimplify_assign (new_vard, x, ilist);
6105 if (nx)
6106 gimplify_and_add (nx, ilist);
6107 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6108 && is_simd
6109 && ctx->for_simd_scan_phase)
6111 tree orig_v = build_outer_var_ref (var, ctx,
6112 OMP_CLAUSE_LASTPRIVATE);
6113 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
6114 orig_v);
6115 gimplify_and_add (x, ilist);
6117 /* FALLTHRU */
6119 do_dtor:
6120 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
6121 if (x)
6122 gimplify_and_add (x, dlist);
6123 if (allocator)
6125 if (!is_gimple_val (allocator))
6127 tree avar = create_tmp_var (TREE_TYPE (allocator));
6128 gimplify_assign (avar, allocator, dlist);
6129 allocator = avar;
6131 if (!is_gimple_val (allocate_ptr))
6133 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
6134 gimplify_assign (apvar, allocate_ptr, dlist);
6135 allocate_ptr = apvar;
6137 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
6138 gimple *g
6139 = gimple_build_call (f, 2, allocate_ptr, allocator);
6140 gimple_seq_add_stmt (dlist, g);
6142 break;
6144 case OMP_CLAUSE_LINEAR:
6145 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6146 goto do_firstprivate;
6147 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6148 x = NULL;
6149 else
6150 x = build_outer_var_ref (var, ctx);
6151 goto do_private;
6153 case OMP_CLAUSE_FIRSTPRIVATE:
6154 if (is_task_ctx (ctx))
6156 if ((omp_privatize_by_reference (var)
6157 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
6158 || is_variable_sized (var))
6159 goto do_dtor;
6160 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
6161 ctx))
6162 || use_pointer_for_field (var, NULL))
6164 x = build_receiver_ref (var, false, ctx);
6165 if (ctx->allocate_map)
6166 if (tree *allocatep = ctx->allocate_map->get (var))
6168 allocator = *allocatep;
6169 if (TREE_CODE (allocator) == TREE_LIST)
6170 allocator = TREE_PURPOSE (allocator);
6171 if (TREE_CODE (allocator) != INTEGER_CST)
6172 allocator = build_outer_var_ref (allocator, ctx);
6173 allocator = fold_convert (pointer_sized_int_node,
6174 allocator);
6175 allocate_ptr = unshare_expr (x);
6176 x = build_simple_mem_ref (x);
6177 TREE_THIS_NOTRAP (x) = 1;
6179 SET_DECL_VALUE_EXPR (new_var, x);
6180 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6181 goto do_dtor;
6184 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6185 && omp_privatize_by_reference (var))
6187 x = build_outer_var_ref (var, ctx);
6188 gcc_assert (TREE_CODE (x) == MEM_REF
6189 && integer_zerop (TREE_OPERAND (x, 1)));
6190 x = TREE_OPERAND (x, 0);
6191 x = lang_hooks.decls.omp_clause_copy_ctor
6192 (c, unshare_expr (new_var), x);
6193 gimplify_and_add (x, ilist);
6194 goto do_dtor;
6196 do_firstprivate:
6197 lower_private_allocate (var, new_var, allocator, allocate_ptr,
6198 ilist, ctx, false, NULL_TREE);
6199 x = build_outer_var_ref (var, ctx);
6200 if (is_simd)
6202 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6203 && gimple_omp_for_combined_into_p (ctx->stmt))
6205 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6206 if (DECL_P (t))
6207 t = build_outer_var_ref (t, ctx);
6208 tree stept = TREE_TYPE (t);
6209 tree ct = omp_find_clause (clauses,
6210 OMP_CLAUSE__LOOPTEMP_);
6211 gcc_assert (ct);
6212 tree l = OMP_CLAUSE_DECL (ct);
6213 tree n1 = fd->loop.n1;
6214 tree step = fd->loop.step;
6215 tree itype = TREE_TYPE (l);
6216 if (POINTER_TYPE_P (itype))
6217 itype = signed_type_for (itype);
6218 l = fold_build2 (MINUS_EXPR, itype, l, n1);
6219 if (TYPE_UNSIGNED (itype)
6220 && fd->loop.cond_code == GT_EXPR)
6221 l = fold_build2 (TRUNC_DIV_EXPR, itype,
6222 fold_build1 (NEGATE_EXPR, itype, l),
6223 fold_build1 (NEGATE_EXPR,
6224 itype, step));
6225 else
6226 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6227 t = fold_build2 (MULT_EXPR, stept,
6228 fold_convert (stept, l), t);
6230 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6232 if (omp_privatize_by_reference (var))
6234 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6235 tree new_vard = TREE_OPERAND (new_var, 0);
6236 gcc_assert (DECL_P (new_vard));
6237 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6238 nx = TYPE_SIZE_UNIT (type);
6239 if (TREE_CONSTANT (nx))
6241 nx = create_tmp_var_raw (type,
6242 get_name (var));
6243 gimple_add_tmp_var (nx);
6244 TREE_ADDRESSABLE (nx) = 1;
6245 nx = build_fold_addr_expr_loc (clause_loc,
6246 nx);
6247 nx = fold_convert_loc (clause_loc,
6248 TREE_TYPE (new_vard),
6249 nx);
6250 gimplify_assign (new_vard, nx, ilist);
6254 x = lang_hooks.decls.omp_clause_linear_ctor
6255 (c, new_var, x, t);
6256 gimplify_and_add (x, ilist);
6257 goto do_dtor;
6260 if (POINTER_TYPE_P (TREE_TYPE (x)))
6261 x = fold_build_pointer_plus (x, t);
6262 else
6263 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x,
6264 fold_convert (TREE_TYPE (x), t));
6267 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6268 || TREE_ADDRESSABLE (new_var)
6269 || omp_privatize_by_reference (var))
6270 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6271 ivar, lvar))
6273 if (omp_privatize_by_reference (var))
6275 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6276 tree new_vard = TREE_OPERAND (new_var, 0);
6277 gcc_assert (DECL_P (new_vard));
6278 SET_DECL_VALUE_EXPR (new_vard,
6279 build_fold_addr_expr (lvar));
6280 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6282 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6284 tree iv = create_tmp_var (TREE_TYPE (new_var));
6285 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6286 gimplify_and_add (x, ilist);
6287 gimple_stmt_iterator gsi
6288 = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6289 gassign *g
6290 = gimple_build_assign (unshare_expr (lvar), iv);
6291 gsi_insert_before_without_update (&gsi, g,
6292 GSI_SAME_STMT);
6293 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6294 enum tree_code code = PLUS_EXPR;
6295 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6296 code = POINTER_PLUS_EXPR;
6297 g = gimple_build_assign (iv, code, iv, t);
6298 gsi_insert_before_without_update (&gsi, g,
6299 GSI_SAME_STMT);
6300 break;
6302 x = lang_hooks.decls.omp_clause_copy_ctor
6303 (c, unshare_expr (ivar), x);
6304 gimplify_and_add (x, &llist[0]);
6305 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6306 if (x)
6307 gimplify_and_add (x, &llist[1]);
6308 break;
6310 if (omp_privatize_by_reference (var))
6312 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6313 tree new_vard = TREE_OPERAND (new_var, 0);
6314 gcc_assert (DECL_P (new_vard));
6315 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6316 nx = TYPE_SIZE_UNIT (type);
6317 if (TREE_CONSTANT (nx))
6319 nx = create_tmp_var_raw (type, get_name (var));
6320 gimple_add_tmp_var (nx);
6321 TREE_ADDRESSABLE (nx) = 1;
6322 nx = build_fold_addr_expr_loc (clause_loc, nx);
6323 nx = fold_convert_loc (clause_loc,
6324 TREE_TYPE (new_vard), nx);
6325 gimplify_assign (new_vard, nx, ilist);
6329 x = lang_hooks.decls.omp_clause_copy_ctor
6330 (c, unshare_expr (new_var), x);
6331 gimplify_and_add (x, ilist);
6332 goto do_dtor;
6334 case OMP_CLAUSE__LOOPTEMP_:
6335 case OMP_CLAUSE__REDUCTEMP_:
6336 gcc_assert (is_taskreg_ctx (ctx));
6337 x = build_outer_var_ref (var, ctx);
6338 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6339 gimplify_and_add (x, ilist);
6340 break;
6342 case OMP_CLAUSE_COPYIN:
6343 by_ref = use_pointer_for_field (var, NULL);
6344 x = build_receiver_ref (var, by_ref, ctx);
6345 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6346 append_to_statement_list (x, &copyin_seq);
6347 copyin_by_ref |= by_ref;
6348 break;
6350 case OMP_CLAUSE_REDUCTION:
6351 case OMP_CLAUSE_IN_REDUCTION:
6352 /* OpenACC reductions are initialized using the
6353 GOACC_REDUCTION internal function. */
6354 if (is_gimple_omp_oacc (ctx->stmt))
6355 break;
6356 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6358 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6359 gimple *tseq;
6360 tree ptype = TREE_TYPE (placeholder);
6361 if (cond)
6363 x = error_mark_node;
6364 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6365 && !task_reduction_needs_orig_p)
6366 x = var;
6367 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6369 tree pptype = build_pointer_type (ptype);
6370 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6371 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6372 size_int (task_reduction_cnt_full
6373 + task_reduction_cntorig - 1),
6374 NULL_TREE, NULL_TREE);
6375 else
6377 unsigned int idx
6378 = *ctx->task_reduction_map->get (c);
6379 x = task_reduction_read (ilist, tskred_temp,
6380 pptype, 7 + 3 * idx);
6382 x = fold_convert (pptype, x);
6383 x = build_simple_mem_ref (x);
6386 else
6388 lower_private_allocate (var, new_var, allocator,
6389 allocate_ptr, ilist, ctx, false,
6390 NULL_TREE);
6391 x = build_outer_var_ref (var, ctx);
6393 if (omp_privatize_by_reference (var)
6394 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6395 x = build_fold_addr_expr_loc (clause_loc, x);
6397 SET_DECL_VALUE_EXPR (placeholder, x);
6398 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6399 tree new_vard = new_var;
6400 if (omp_privatize_by_reference (var))
6402 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6403 new_vard = TREE_OPERAND (new_var, 0);
6404 gcc_assert (DECL_P (new_vard));
6406 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6407 if (is_simd
6408 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6409 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6410 rvarp = &rvar;
6411 if (is_simd
6412 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6413 ivar, lvar, rvarp,
6414 &rvar2))
6416 if (new_vard == new_var)
6418 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6419 SET_DECL_VALUE_EXPR (new_var, ivar);
6421 else
6423 SET_DECL_VALUE_EXPR (new_vard,
6424 build_fold_addr_expr (ivar));
6425 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6427 x = lang_hooks.decls.omp_clause_default_ctor
6428 (c, unshare_expr (ivar),
6429 build_outer_var_ref (var, ctx));
6430 if (rvarp && ctx->for_simd_scan_phase)
6432 if (x)
6433 gimplify_and_add (x, &llist[0]);
6434 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6435 if (x)
6436 gimplify_and_add (x, &llist[1]);
6437 break;
6439 else if (rvarp)
6441 if (x)
6443 gimplify_and_add (x, &llist[0]);
6445 tree ivar2 = unshare_expr (lvar);
6446 TREE_OPERAND (ivar2, 1) = sctx.idx;
6447 x = lang_hooks.decls.omp_clause_default_ctor
6448 (c, ivar2, build_outer_var_ref (var, ctx));
6449 gimplify_and_add (x, &llist[0]);
6451 if (rvar2)
6453 x = lang_hooks.decls.omp_clause_default_ctor
6454 (c, unshare_expr (rvar2),
6455 build_outer_var_ref (var, ctx));
6456 gimplify_and_add (x, &llist[0]);
6459 /* For types that need construction, add another
6460 private var which will be default constructed
6461 and optionally initialized with
6462 OMP_CLAUSE_REDUCTION_GIMPLE_INIT; in the
6463 loop we then want to assign this value instead
6464 of constructing and destructing it in each
6465 iteration. */
6466 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6467 gimple_add_tmp_var (nv);
6468 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6469 ? rvar2
6470 : ivar, 0),
6471 nv);
6472 x = lang_hooks.decls.omp_clause_default_ctor
6473 (c, nv, build_outer_var_ref (var, ctx));
6474 gimplify_and_add (x, ilist);
6476 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6478 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6479 x = DECL_VALUE_EXPR (new_vard);
6480 tree vexpr = nv;
6481 if (new_vard != new_var)
6482 vexpr = build_fold_addr_expr (nv);
6483 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6484 lower_omp (&tseq, ctx);
6485 SET_DECL_VALUE_EXPR (new_vard, x);
6486 gimple_seq_add_seq (ilist, tseq);
6487 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6490 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6491 if (x)
6492 gimplify_and_add (x, dlist);
6495 tree ref = build_outer_var_ref (var, ctx);
6496 x = unshare_expr (ivar);
6497 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6498 ref);
6499 gimplify_and_add (x, &llist[0]);
6501 ref = build_outer_var_ref (var, ctx);
6502 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6503 rvar);
6504 gimplify_and_add (x, &llist[3]);
6506 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6507 if (new_vard == new_var)
6508 SET_DECL_VALUE_EXPR (new_var, lvar);
6509 else
6510 SET_DECL_VALUE_EXPR (new_vard,
6511 build_fold_addr_expr (lvar));
6513 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6514 if (x)
6515 gimplify_and_add (x, &llist[1]);
6517 tree ivar2 = unshare_expr (lvar);
6518 TREE_OPERAND (ivar2, 1) = sctx.idx;
6519 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6520 if (x)
6521 gimplify_and_add (x, &llist[1]);
6523 if (rvar2)
6525 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6526 if (x)
6527 gimplify_and_add (x, &llist[1]);
6529 break;
6531 if (x)
6532 gimplify_and_add (x, &llist[0]);
6533 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6535 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6536 lower_omp (&tseq, ctx);
6537 gimple_seq_add_seq (&llist[0], tseq);
6539 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6540 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6541 lower_omp (&tseq, ctx);
6542 gimple_seq_add_seq (&llist[1], tseq);
6543 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6544 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6545 if (new_vard == new_var)
6546 SET_DECL_VALUE_EXPR (new_var, lvar);
6547 else
6548 SET_DECL_VALUE_EXPR (new_vard,
6549 build_fold_addr_expr (lvar));
6550 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6551 if (x)
6552 gimplify_and_add (x, &llist[1]);
6553 break;
6555 /* If this is a reference to a constant-size reduction var
6556 with a placeholder, we haven't emitted the initializer
6557 for it because it is undesirable if SIMD arrays are used.
6558 But if they aren't used, we need to emit the deferred
6559 initialization now. */
6560 else if (omp_privatize_by_reference (var) && is_simd)
6561 handle_simd_reference (clause_loc, new_vard, ilist);
6563 tree lab2 = NULL_TREE;
6564 if (cond)
6566 gimple *g;
6567 if (!is_parallel_ctx (ctx))
6569 tree condv = create_tmp_var (boolean_type_node);
6570 tree m = build_simple_mem_ref (cond);
6571 g = gimple_build_assign (condv, m);
6572 gimple_seq_add_stmt (ilist, g);
6573 tree lab1
6574 = create_artificial_label (UNKNOWN_LOCATION);
6575 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6576 g = gimple_build_cond (NE_EXPR, condv,
6577 boolean_false_node,
6578 lab2, lab1);
6579 gimple_seq_add_stmt (ilist, g);
6580 gimple_seq_add_stmt (ilist,
6581 gimple_build_label (lab1));
6583 g = gimple_build_assign (build_simple_mem_ref (cond),
6584 boolean_true_node);
6585 gimple_seq_add_stmt (ilist, g);
6587 x = lang_hooks.decls.omp_clause_default_ctor
6588 (c, unshare_expr (new_var),
6589 cond ? NULL_TREE
6590 : build_outer_var_ref (var, ctx));
6591 if (x)
6592 gimplify_and_add (x, ilist);
6594 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6595 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6597 if (ctx->for_simd_scan_phase)
6598 goto do_dtor;
6599 if (x || (!is_simd
6600 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6602 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6603 gimple_add_tmp_var (nv);
6604 ctx->cb.decl_map->put (new_vard, nv);
6605 x = lang_hooks.decls.omp_clause_default_ctor
6606 (c, nv, build_outer_var_ref (var, ctx));
6607 if (x)
6608 gimplify_and_add (x, ilist);
6609 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6611 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6612 tree vexpr = nv;
6613 if (new_vard != new_var)
6614 vexpr = build_fold_addr_expr (nv);
6615 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6616 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6617 lower_omp (&tseq, ctx);
6618 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6619 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6620 gimple_seq_add_seq (ilist, tseq);
6622 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6623 if (is_simd && ctx->scan_exclusive)
6625 tree nv2
6626 = create_tmp_var_raw (TREE_TYPE (new_var));
6627 gimple_add_tmp_var (nv2);
6628 ctx->cb.decl_map->put (nv, nv2);
6629 x = lang_hooks.decls.omp_clause_default_ctor
6630 (c, nv2, build_outer_var_ref (var, ctx));
6631 gimplify_and_add (x, ilist);
6632 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6633 if (x)
6634 gimplify_and_add (x, dlist);
6636 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6637 if (x)
6638 gimplify_and_add (x, dlist);
6640 else if (is_simd
6641 && ctx->scan_exclusive
6642 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6644 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6645 gimple_add_tmp_var (nv2);
6646 ctx->cb.decl_map->put (new_vard, nv2);
6647 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6648 if (x)
6649 gimplify_and_add (x, dlist);
6651 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6652 goto do_dtor;
6655 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6657 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6658 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6659 && is_omp_target (ctx->stmt))
6661 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6662 tree oldv = NULL_TREE;
6663 gcc_assert (d);
6664 if (DECL_HAS_VALUE_EXPR_P (d))
6665 oldv = DECL_VALUE_EXPR (d);
6666 SET_DECL_VALUE_EXPR (d, new_vard);
6667 DECL_HAS_VALUE_EXPR_P (d) = 1;
6668 lower_omp (&tseq, ctx);
6669 if (oldv)
6670 SET_DECL_VALUE_EXPR (d, oldv);
6671 else
6673 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6674 DECL_HAS_VALUE_EXPR_P (d) = 0;
6677 else
6678 lower_omp (&tseq, ctx);
6679 gimple_seq_add_seq (ilist, tseq);
6681 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6682 if (is_simd)
6684 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6685 lower_omp (&tseq, ctx);
6686 gimple_seq_add_seq (dlist, tseq);
6687 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6689 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6690 if (cond)
6692 if (lab2)
6693 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6694 break;
6696 goto do_dtor;
6698 else
6700 x = omp_reduction_init (c, TREE_TYPE (new_var));
6701 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6702 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6704 if (cond)
6706 gimple *g;
6707 tree lab2 = NULL_TREE;
6708 /* GOMP_taskgroup_reduction_register memsets the whole
6709 array to zero. If the initializer is zero, we don't
6710 need to initialize it again, just mark it as ever
6711 used unconditionally, i.e. cond = true. */
6712 if (initializer_zerop (x))
6714 g = gimple_build_assign (build_simple_mem_ref (cond),
6715 boolean_true_node);
6716 gimple_seq_add_stmt (ilist, g);
6717 break;
6720 /* Otherwise, emit
6721 if (!cond) { cond = true; new_var = x; } */
6722 if (!is_parallel_ctx (ctx))
6724 tree condv = create_tmp_var (boolean_type_node);
6725 tree m = build_simple_mem_ref (cond);
6726 g = gimple_build_assign (condv, m);
6727 gimple_seq_add_stmt (ilist, g);
6728 tree lab1
6729 = create_artificial_label (UNKNOWN_LOCATION);
6730 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6731 g = gimple_build_cond (NE_EXPR, condv,
6732 boolean_false_node,
6733 lab2, lab1);
6734 gimple_seq_add_stmt (ilist, g);
6735 gimple_seq_add_stmt (ilist,
6736 gimple_build_label (lab1));
6738 g = gimple_build_assign (build_simple_mem_ref (cond),
6739 boolean_true_node);
6740 gimple_seq_add_stmt (ilist, g);
6741 gimplify_assign (new_var, x, ilist);
6742 if (lab2)
6743 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6744 break;
6747 /* reduction(-:var) sums up the partial results, so it
6748 acts identically to reduction(+:var). */
6749 if (code == MINUS_EXPR)
6750 code = PLUS_EXPR;
6752 bool is_truth_op
6753 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6754 tree new_vard = new_var;
6755 if (is_simd && omp_privatize_by_reference (var))
6757 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6758 new_vard = TREE_OPERAND (new_var, 0);
6759 gcc_assert (DECL_P (new_vard));
6761 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6762 if (is_simd
6763 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6764 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6765 rvarp = &rvar;
6766 if (is_simd
6767 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6768 ivar, lvar, rvarp,
6769 &rvar2))
6771 if (new_vard != new_var)
6773 SET_DECL_VALUE_EXPR (new_vard,
6774 build_fold_addr_expr (lvar));
6775 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6778 tree ref = build_outer_var_ref (var, ctx);
6780 if (rvarp)
6782 if (ctx->for_simd_scan_phase)
6783 break;
6784 gimplify_assign (ivar, ref, &llist[0]);
6785 ref = build_outer_var_ref (var, ctx);
6786 gimplify_assign (ref, rvar, &llist[3]);
6787 break;
6790 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6792 if (sctx.is_simt)
6794 if (!simt_lane)
6795 simt_lane = create_tmp_var (unsigned_type_node);
6796 x = build_call_expr_internal_loc
6797 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6798 TREE_TYPE (ivar), 2, ivar, simt_lane);
6799 /* Make sure x is evaluated unconditionally. */
6800 tree bfly_var = create_tmp_var (TREE_TYPE (ivar));
6801 gimplify_assign (bfly_var, x, &llist[2]);
6802 x = build2 (code, TREE_TYPE (ivar), ivar, bfly_var);
6803 gimplify_assign (ivar, x, &llist[2]);
6805 tree ivar2 = ivar;
6806 tree ref2 = ref;
6807 if (is_truth_op)
6809 tree zero = build_zero_cst (TREE_TYPE (ivar));
6810 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6811 boolean_type_node, ivar,
6812 zero);
6813 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6814 boolean_type_node, ref,
6815 zero);
6817 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6818 if (is_truth_op)
6819 x = fold_convert (TREE_TYPE (ref), x);
6820 ref = build_outer_var_ref (var, ctx);
6821 gimplify_assign (ref, x, &llist[1]);
6824 else
6826 lower_private_allocate (var, new_var, allocator,
6827 allocate_ptr, ilist, ctx,
6828 false, NULL_TREE);
6829 if (omp_privatize_by_reference (var) && is_simd)
6830 handle_simd_reference (clause_loc, new_vard, ilist);
6831 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6832 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6833 break;
6834 gimplify_assign (new_var, x, ilist);
6835 if (is_simd)
6837 tree ref = build_outer_var_ref (var, ctx);
6838 tree new_var2 = new_var;
6839 tree ref2 = ref;
6840 if (is_truth_op)
6842 tree zero = build_zero_cst (TREE_TYPE (new_var));
6843 new_var2
6844 = fold_build2_loc (clause_loc, NE_EXPR,
6845 boolean_type_node, new_var,
6846 zero);
6847 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6848 boolean_type_node, ref,
6849 zero);
6851 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6852 if (is_truth_op)
6853 x = fold_convert (TREE_TYPE (new_var), x);
6854 ref = build_outer_var_ref (var, ctx);
6855 gimplify_assign (ref, x, dlist);
6857 if (allocator)
6858 goto do_dtor;
6861 break;
6863 default:
6864 gcc_unreachable ();
6868 if (tskred_avar)
6870 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6871 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6874 if (known_eq (sctx.max_vf, 1U))
6876 sctx.is_simt = false;
6877 if (ctx->lastprivate_conditional_map)
6879 if (gimple_omp_for_combined_into_p (ctx->stmt))
6882 /* Signal to lower_omp_1 that it should use the parent context. */
6882 ctx->combined_into_simd_safelen1 = true;
6883 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6884 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6885 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6887 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6888 omp_context *outer = ctx->outer;
6889 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6890 outer = outer->outer;
6891 tree *v = ctx->lastprivate_conditional_map->get (o);
6892 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6893 tree *pv = outer->lastprivate_conditional_map->get (po);
6894 *v = *pv;
6897 else
6899 /* When not vectorized, treat lastprivate(conditional:) like
6900 normal lastprivate, as there will be just one simd lane
6901 writing the privatized variable. */
6902 delete ctx->lastprivate_conditional_map;
6903 ctx->lastprivate_conditional_map = NULL;
6908 if (nonconst_simd_if)
6910 if (sctx.lane == NULL_TREE)
6912 sctx.idx = create_tmp_var (unsigned_type_node);
6913 sctx.lane = create_tmp_var (unsigned_type_node);
6915 /* FIXME: For now. */
6916 sctx.is_simt = false;
6919 if (sctx.lane || sctx.is_simt)
6921 uid = create_tmp_var (ptr_type_node, "simduid");
6922 /* Don't want uninit warnings on simduid; it is always uninitialized,
6923 but we use it only for its DECL_UID, never for its value. */
6924 suppress_warning (uid, OPT_Wuninitialized);
6925 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6926 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6927 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6928 gimple_omp_for_set_clauses (ctx->stmt, c);
6930 /* Emit calls denoting privatized variables and initializing a pointer to the
6931 structure that holds private variables as fields; expanded after the ompdevlow pass. */
6932 if (sctx.is_simt)
6934 sctx.simt_eargs[0] = uid;
6935 gimple *g
6936 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6937 gimple_call_set_lhs (g, uid);
6938 gimple_seq_add_stmt (ilist, g);
6939 sctx.simt_eargs.release ();
6941 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6942 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6943 gimple_call_set_lhs (g, simtrec);
6944 gimple_seq_add_stmt (ilist, g);
6946 if (sctx.lane)
6948 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6949 2 + (nonconst_simd_if != NULL),
6950 uid, integer_zero_node,
6951 nonconst_simd_if);
6952 gimple_call_set_lhs (g, sctx.lane);
6953 gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6954 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6955 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6956 build_int_cst (unsigned_type_node, 0));
6957 gimple_seq_add_stmt (ilist, g);
6958 if (sctx.lastlane)
6960 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6961 2, uid, sctx.lane);
6962 gimple_call_set_lhs (g, sctx.lastlane);
6963 gimple_seq_add_stmt (dlist, g);
6964 gimple_seq_add_seq (dlist, llist[3]);
6966 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
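/* A sketch of the loop built below (OP stands for the reduction
   operation; llist[2] holds the loop body):
     for (simt_lane = 1; simt_lane < simt_vf; simt_lane <<= 1)
       ivar = ivar OP .GOMP_SIMT_XCHG_BFLY (ivar, simt_lane);  */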
6967 if (llist[2])
6969 tree simt_vf = create_tmp_var (unsigned_type_node);
6970 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6971 gimple_call_set_lhs (g, simt_vf);
6972 gimple_seq_add_stmt (dlist, g);
6974 tree t = build_int_cst (unsigned_type_node, 1);
6975 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6976 gimple_seq_add_stmt (dlist, g);
6978 t = build_int_cst (unsigned_type_node, 0);
6979 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6980 gimple_seq_add_stmt (dlist, g);
6982 tree body = create_artificial_label (UNKNOWN_LOCATION);
6983 tree header = create_artificial_label (UNKNOWN_LOCATION);
6984 tree end = create_artificial_label (UNKNOWN_LOCATION);
6985 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6986 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6988 gimple_seq_add_seq (dlist, llist[2]);
6990 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6991 gimple_seq_add_stmt (dlist, g);
6993 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6994 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6995 gimple_seq_add_stmt (dlist, g);
6997 gimple_seq_add_stmt (dlist, gimple_build_label (end));
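/* Wrap the per-lane init (llist[0], into ILIST) and fini (llist[1],
   into DLIST) sequences in a loop over the SIMD array elements,
   roughly:
     for (sctx.idx = 0; sctx.idx < .GOMP_SIMD_VF (simduid); sctx.idx++)
       <llist[i]>;  */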
6999 for (int i = 0; i < 2; i++)
7000 if (llist[i])
7002 tree vf = create_tmp_var (unsigned_type_node);
7003 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
7004 gimple_call_set_lhs (g, vf);
7005 gimple_seq *seq = i == 0 ? ilist : dlist;
7006 gimple_seq_add_stmt (seq, g);
7007 tree t = build_int_cst (unsigned_type_node, 0);
7008 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
7009 gimple_seq_add_stmt (seq, g);
7010 tree body = create_artificial_label (UNKNOWN_LOCATION);
7011 tree header = create_artificial_label (UNKNOWN_LOCATION);
7012 tree end = create_artificial_label (UNKNOWN_LOCATION);
7013 gimple_seq_add_stmt (seq, gimple_build_goto (header));
7014 gimple_seq_add_stmt (seq, gimple_build_label (body));
7015 gimple_seq_add_seq (seq, llist[i]);
7016 t = build_int_cst (unsigned_type_node, 1);
7017 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
7018 gimple_seq_add_stmt (seq, g);
7019 gimple_seq_add_stmt (seq, gimple_build_label (header));
7020 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
7021 gimple_seq_add_stmt (seq, g);
7022 gimple_seq_add_stmt (seq, gimple_build_label (end));
7025 if (sctx.is_simt)
7027 gimple_seq_add_seq (dlist, sctx.simt_dlist);
7028 gimple *g
7029 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
7030 gimple_seq_add_stmt (dlist, g);
7033 /* The copyin sequence is not to be executed by the main thread, since
7034 that would result in self-copies. Such a self-copy is perhaps not
7035 observable for scalars, but it certainly is for a C++ operator=. */
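/* I.e. the guard built below is, in sketch form:
     if (omp_get_thread_num () != 0)
       <copyin_seq>;  */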
7036 if (copyin_seq)
7038 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
7040 x = build2 (NE_EXPR, boolean_type_node, x,
7041 build_int_cst (TREE_TYPE (x), 0));
7042 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
7043 gimplify_and_add (x, ilist);
7046 /* If any copyin variable is passed by reference, we must ensure the
7047 master thread doesn't modify it before it is copied over in all
7048 threads. Similarly for variables in both firstprivate and
7049 lastprivate clauses we need to ensure the lastprivate copying
7050 happens after firstprivate copying in all threads. And similarly
7051 for UDRs if initializer expression refers to omp_orig. */
7052 if (copyin_by_ref || lastprivate_firstprivate
7053 || (reduction_omp_orig_ref
7054 && !ctx->scan_inclusive
7055 && !ctx->scan_exclusive))
7057 /* Don't add any barrier for #pragma omp simd or
7058 #pragma omp distribute. */
7059 if (!is_task_ctx (ctx)
7060 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
7061 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
7062 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
7065 /* If max_vf is non-zero, then we can use only a vectorization factor
7066 up to the max_vf we chose. So stick it into the safelen clause. */
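/* E.g. with max_vf == 16, a loop without a safelen clause (or with a
   larger one) ends up with an explicit safelen(16). */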
7067 if (maybe_ne (sctx.max_vf, 0U))
7069 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
7070 OMP_CLAUSE_SAFELEN);
7071 poly_uint64 safe_len;
7072 if (c == NULL_TREE
7073 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
7074 && maybe_gt (safe_len, sctx.max_vf)))
7076 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
7077 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
7078 sctx.max_vf);
7079 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
7080 gimple_omp_for_set_clauses (ctx->stmt, c);
7085 /* Create temporary variables for the lastprivate(conditional:) implementation
7086 in context CTX with CLAUSES. */
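/* A sketch of the mechanism (inferred from the code below and from
   lower_lastprivate_clauses): per lastprivate(conditional:) variable an
   iteration-counter-typed temporary is created; stores record the
   iteration in it, and the copy-out keeps the value from the highest
   recorded iteration (see the GT_EXPR comparison emitted there). */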
7088 static void
7089 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
7091 tree iter_type = NULL_TREE;
7092 tree cond_ptr = NULL_TREE;
7093 tree iter_var = NULL_TREE;
7094 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7095 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
7096 tree next = *clauses;
7097 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
7098 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7099 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
7101 if (is_simd)
7103 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
7104 gcc_assert (cc);
7105 if (iter_type == NULL_TREE)
7107 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
7108 iter_var = create_tmp_var_raw (iter_type);
7109 DECL_CONTEXT (iter_var) = current_function_decl;
7110 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7111 DECL_CHAIN (iter_var) = ctx->block_vars;
7112 ctx->block_vars = iter_var;
7113 tree c3
7114 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7115 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7116 OMP_CLAUSE_DECL (c3) = iter_var;
7117 OMP_CLAUSE_CHAIN (c3) = *clauses;
7118 *clauses = c3;
7119 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7121 next = OMP_CLAUSE_CHAIN (cc);
7122 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7123 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
7124 ctx->lastprivate_conditional_map->put (o, v);
7125 continue;
7127 if (iter_type == NULL)
7129 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
7131 struct omp_for_data fd;
7132 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
7133 NULL);
7134 iter_type = unsigned_type_for (fd.iter_type);
7136 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
7137 iter_type = unsigned_type_node;
7138 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
7139 if (c2)
7141 cond_ptr
7142 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
7143 OMP_CLAUSE_DECL (c2) = cond_ptr;
7145 else
7147 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
7148 DECL_CONTEXT (cond_ptr) = current_function_decl;
7149 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
7150 DECL_CHAIN (cond_ptr) = ctx->block_vars;
7151 ctx->block_vars = cond_ptr;
7152 c2 = build_omp_clause (UNKNOWN_LOCATION,
7153 OMP_CLAUSE__CONDTEMP_);
7154 OMP_CLAUSE_DECL (c2) = cond_ptr;
7155 OMP_CLAUSE_CHAIN (c2) = *clauses;
7156 *clauses = c2;
7158 iter_var = create_tmp_var_raw (iter_type);
7159 DECL_CONTEXT (iter_var) = current_function_decl;
7160 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7161 DECL_CHAIN (iter_var) = ctx->block_vars;
7162 ctx->block_vars = iter_var;
7163 tree c3
7164 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7165 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7166 OMP_CLAUSE_DECL (c3) = iter_var;
7167 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
7168 OMP_CLAUSE_CHAIN (c2) = c3;
7169 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7171 tree v = create_tmp_var_raw (iter_type);
7172 DECL_CONTEXT (v) = current_function_decl;
7173 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
7174 DECL_CHAIN (v) = ctx->block_vars;
7175 ctx->block_vars = v;
7176 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7177 ctx->lastprivate_conditional_map->put (o, v);
7182 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7183 both parallel and workshare constructs. PREDICATE may be NULL if it's
7184 always true. BODY_P is the sequence in which to insert early
7185 initialization if needed, STMT_LIST is where the non-conditional
7186 lastprivate handling goes, and CSTMT_LIST is a sequence that needs to be
7187 run in a critical section. */
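/* When PREDICATE is non-NULL, the generated shape is roughly:
     if (<predicate>) goto label_true; else goto label;
   label_true:
     <copy privatized values back to the original variables>
   label:;  */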
7189 static void
7190 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
7191 gimple_seq *stmt_list, gimple_seq *cstmt_list,
7192 omp_context *ctx)
7194 tree x, c, label = NULL, orig_clauses = clauses;
7195 bool par_clauses = false;
7196 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
7197 unsigned HOST_WIDE_INT conditional_off = 0;
7198 gimple_seq post_stmt_list = NULL;
7200 /* Early exit if there are no lastprivate or linear clauses. */
7201 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
7202 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
7203 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
7204 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
7205 break;
7206 if (clauses == NULL)
7208 /* If this was a workshare clause, see if it had been combined
7209 with its parallel. In that case, look for the clauses on the
7210 parallel statement itself. */
7211 if (is_parallel_ctx (ctx))
7212 return;
7214 ctx = ctx->outer;
7215 if (ctx == NULL || !is_parallel_ctx (ctx))
7216 return;
7218 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7219 OMP_CLAUSE_LASTPRIVATE);
7220 if (clauses == NULL)
7221 return;
7222 par_clauses = true;
7225 bool maybe_simt = false;
7226 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7227 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7229 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
7230 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
7231 if (simduid)
7232 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
7235 if (predicate)
7237 gcond *stmt;
7238 tree label_true, arm1, arm2;
7239 enum tree_code pred_code = TREE_CODE (predicate);
7241 label = create_artificial_label (UNKNOWN_LOCATION);
7242 label_true = create_artificial_label (UNKNOWN_LOCATION);
7243 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
7245 arm1 = TREE_OPERAND (predicate, 0);
7246 arm2 = TREE_OPERAND (predicate, 1);
7247 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7248 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
7250 else
7252 arm1 = predicate;
7253 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7254 arm2 = boolean_false_node;
7255 pred_code = NE_EXPR;
7257 if (maybe_simt)
7259 c = build2 (pred_code, boolean_type_node, arm1, arm2);
7260 c = fold_convert (integer_type_node, c);
7261 simtcond = create_tmp_var (integer_type_node);
7262 gimplify_assign (simtcond, c, stmt_list);
7263 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
7264 1, simtcond);
7265 c = create_tmp_var (integer_type_node);
7266 gimple_call_set_lhs (g, c);
7267 gimple_seq_add_stmt (stmt_list, g);
7268 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
7269 label_true, label);
7271 else
7272 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
7273 gimple_seq_add_stmt (stmt_list, stmt);
7274 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
7277 tree cond_ptr = NULL_TREE;
7278 for (c = clauses; c ;)
7280 tree var, new_var;
7281 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7282 gimple_seq *this_stmt_list = stmt_list;
7283 tree lab2 = NULL_TREE;
7285 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7286 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7287 && ctx->lastprivate_conditional_map
7288 && !ctx->combined_into_simd_safelen1)
7290 gcc_assert (body_p);
7291 if (simduid)
7292 goto next;
7293 if (cond_ptr == NULL_TREE)
7295 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
7296 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
7298 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
7299 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7300 tree v = *ctx->lastprivate_conditional_map->get (o);
7301 gimplify_assign (v, build_zero_cst (type), body_p);
7302 this_stmt_list = cstmt_list;
7303 tree mem;
7304 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
7306 mem = build2 (MEM_REF, type, cond_ptr,
7307 build_int_cst (TREE_TYPE (cond_ptr),
7308 conditional_off));
7309 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
7311 else
7312 mem = build4 (ARRAY_REF, type, cond_ptr,
7313 size_int (conditional_off++), NULL_TREE, NULL_TREE);
7314 tree mem2 = copy_node (mem);
7315 gimple_seq seq = NULL;
7316 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
7317 gimple_seq_add_seq (this_stmt_list, seq);
7318 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7319 lab2 = create_artificial_label (UNKNOWN_LOCATION);
7320 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
7321 gimple_seq_add_stmt (this_stmt_list, g);
7322 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
7323 gimplify_assign (mem2, v, this_stmt_list);
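/* The sequence just built emits, inside the critical section, roughly:
     if (v > cond_slot) { cond_slot = v; <copy-out below>; }
   so the store from the highest iteration count is the one kept. */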
7325 else if (predicate
7326 && ctx->combined_into_simd_safelen1
7327 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7328 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7329 && ctx->lastprivate_conditional_map)
7330 this_stmt_list = &post_stmt_list;
7332 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7333 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7334 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7336 var = OMP_CLAUSE_DECL (c);
7337 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7338 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7339 && is_taskloop_ctx (ctx))
7341 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
7342 new_var = lookup_decl (var, ctx->outer);
7344 else
7346 new_var = lookup_decl (var, ctx);
7347 /* Avoid uninitialized warnings for lastprivate and
7348 for linear iterators. */
7349 if (predicate
7350 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7351 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
7352 suppress_warning (new_var, OPT_Wuninitialized);
7355 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
7357 tree val = DECL_VALUE_EXPR (new_var);
7358 if (TREE_CODE (val) == ARRAY_REF
7359 && VAR_P (TREE_OPERAND (val, 0))
7360 && lookup_attribute ("omp simd array",
7361 DECL_ATTRIBUTES (TREE_OPERAND (val,
7362 0))))
7364 if (lastlane == NULL)
7366 lastlane = create_tmp_var (unsigned_type_node);
7367 gcall *g
7368 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
7369 2, simduid,
7370 TREE_OPERAND (val, 1));
7371 gimple_call_set_lhs (g, lastlane);
7372 gimple_seq_add_stmt (this_stmt_list, g);
7374 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
7375 TREE_OPERAND (val, 0), lastlane,
7376 NULL_TREE, NULL_TREE);
7377 TREE_THIS_NOTRAP (new_var) = 1;
7380 else if (maybe_simt)
7382 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7383 ? DECL_VALUE_EXPR (new_var)
7384 : new_var);
7385 if (simtlast == NULL)
7387 simtlast = create_tmp_var (unsigned_type_node);
7388 gcall *g = gimple_build_call_internal
7389 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7390 gimple_call_set_lhs (g, simtlast);
7391 gimple_seq_add_stmt (this_stmt_list, g);
7393 x = build_call_expr_internal_loc
7394 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7395 TREE_TYPE (val), 2, val, simtlast);
7396 new_var = unshare_expr (new_var);
7397 gimplify_assign (new_var, x, this_stmt_list);
7398 new_var = unshare_expr (new_var);
7401 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7402 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7404 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7405 gimple_seq_add_seq (this_stmt_list,
7406 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7407 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7409 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7410 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7412 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7413 gimple_seq_add_seq (this_stmt_list,
7414 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7415 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
7418 x = NULL_TREE;
7419 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7420 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7421 && is_taskloop_ctx (ctx))
7423 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7424 ctx->outer->outer);
7425 if (is_global_var (ovar))
7426 x = ovar;
7428 if (!x)
7429 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7430 if (omp_privatize_by_reference (var))
7431 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7432 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7433 gimplify_and_add (x, this_stmt_list);
7435 if (lab2)
7436 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7439 next:
7440 c = OMP_CLAUSE_CHAIN (c);
7441 if (c == NULL && !par_clauses)
7443 /* If this was a workshare clause, see if it had been combined
7444 with its parallel. In that case, continue looking for the
7445 clauses on the parallel statement itself as well. */
7446 if (is_parallel_ctx (ctx))
7447 break;
7449 ctx = ctx->outer;
7450 if (ctx == NULL || !is_parallel_ctx (ctx))
7451 break;
7453 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7454 OMP_CLAUSE_LASTPRIVATE);
7455 par_clauses = true;
7459 if (label)
7460 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7461 gimple_seq_add_seq (stmt_list, post_stmt_list);
7464 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7465 (which might be a placeholder). INNER is true if this is an inner
7466 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7467 join markers. Generate the before-loop forking sequence in
7468 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
7469 general form of these sequences is
7471 GOACC_REDUCTION_SETUP
7472 GOACC_FORK
7473 GOACC_REDUCTION_INIT
7474 ...
7475 GOACC_REDUCTION_FINI
7476 GOACC_JOIN
7477 GOACC_REDUCTION_TEARDOWN. */
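/* As an illustration (an editor's sketch, not part of the original
   sources): for '#pragma acc loop gang reduction (+:sum)' the
   sequences built below look roughly like

     v1 = .GOACC_REDUCTION (SETUP, ref_to_res, sum, level, +, offset);
     .GOACC_FORK
     v2 = .GOACC_REDUCTION (INIT, ref_to_res, v1, level, +, offset);
     ... loop body updating the private copy ...
     v3 = .GOACC_REDUCTION (FINI, ref_to_res, v2, level, +, offset);
     .GOACC_JOIN
     sum = .GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, +, offset);

   where LEVEL and OFFSET are the compute axis and the reduction-buffer
   position computed below; a later device-lowering pass expands the
   .GOACC_REDUCTION calls into target-specific code.  */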
7479 static void
7480 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7481 gcall *fork, gcall *private_marker, gcall *join,
7482 gimple_seq *fork_seq, gimple_seq *join_seq,
7483 omp_context *ctx)
7485 gimple_seq before_fork = NULL;
7486 gimple_seq after_fork = NULL;
7487 gimple_seq before_join = NULL;
7488 gimple_seq after_join = NULL;
7489 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7490 setup_code = NULL_TREE, teardown_code = NULL_TREE;
7491 unsigned offset = 0;
7493 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7494 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7496 /* No 'reduction' clauses on OpenACC 'kernels'. */
7497 gcc_checking_assert (!is_oacc_kernels (ctx));
7498 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7499 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7501 tree orig = OMP_CLAUSE_DECL (c);
7502 tree var = maybe_lookup_decl (orig, ctx);
7503 tree ref_to_res = NULL_TREE;
7504 tree incoming, outgoing, v1, v2, v3;
7505 bool is_private = false;
7507 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7508 if (rcode == MINUS_EXPR)
7509 rcode = PLUS_EXPR;
7510 else if (rcode == TRUTH_ANDIF_EXPR)
7511 rcode = BIT_AND_EXPR;
7512 else if (rcode == TRUTH_ORIF_EXPR)
7513 rcode = BIT_IOR_EXPR;
7514 tree op = build_int_cst (unsigned_type_node, rcode);
7516 if (!var)
7517 var = orig;
7519 incoming = outgoing = var;
7521 if (!inner)
7523 /* See if an outer construct also reduces this variable. */
7524 omp_context *outer = ctx;
7526 while (omp_context *probe = outer->outer)
7528 enum gimple_code type = gimple_code (probe->stmt);
7529 tree cls;
7531 switch (type)
7533 case GIMPLE_OMP_FOR:
7534 cls = gimple_omp_for_clauses (probe->stmt);
7535 break;
7537 case GIMPLE_OMP_TARGET:
7538 /* No 'reduction' clauses inside OpenACC 'kernels'
7539 regions. */
7540 gcc_checking_assert (!is_oacc_kernels (probe));
7542 if (!is_gimple_omp_offloaded (probe->stmt))
7543 goto do_lookup;
7545 cls = gimple_omp_target_clauses (probe->stmt);
7546 break;
7548 default:
7549 goto do_lookup;
7552 outer = probe;
7553 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7554 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7555 && orig == OMP_CLAUSE_DECL (cls))
7557 incoming = outgoing = lookup_decl (orig, probe);
7558 goto has_outer_reduction;
7560 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7561 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7562 && orig == OMP_CLAUSE_DECL (cls))
7564 is_private = true;
7565 goto do_lookup;
7569 do_lookup:
7570 /* This is the outermost construct with this reduction;
7571 see if there's a mapping for it. */
7572 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7573 && maybe_lookup_field (orig, outer) && !is_private)
7575 ref_to_res = build_receiver_ref (orig, false, outer);
7576 if (omp_privatize_by_reference (orig))
7577 ref_to_res = build_simple_mem_ref (ref_to_res);
7579 tree type = TREE_TYPE (var);
7580 if (POINTER_TYPE_P (type))
7581 type = TREE_TYPE (type);
7583 outgoing = var;
7584 incoming = omp_reduction_init_op (loc, rcode, type);
7586 else
7588 /* Try to look at enclosing contexts for the reduction var;
7589 use the original if no mapping is found. */
7590 tree t = NULL_TREE;
7591 omp_context *c = ctx->outer;
7592 while (c && !t)
7594 t = maybe_lookup_decl (orig, c);
7595 c = c->outer;
7597 incoming = outgoing = (t ? t : orig);
7600 has_outer_reduction:;
7603 if (!ref_to_res)
7604 ref_to_res = integer_zero_node;
7606 if (omp_privatize_by_reference (orig))
7608 tree type = TREE_TYPE (var);
7609 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7611 if (!inner)
7613 tree x = create_tmp_var (TREE_TYPE (type), id);
7614 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7617 v1 = create_tmp_var (type, id);
7618 v2 = create_tmp_var (type, id);
7619 v3 = create_tmp_var (type, id);
7621 gimplify_assign (v1, var, fork_seq);
7622 gimplify_assign (v2, var, fork_seq);
7623 gimplify_assign (v3, var, fork_seq);
7625 var = build_simple_mem_ref (var);
7626 v1 = build_simple_mem_ref (v1);
7627 v2 = build_simple_mem_ref (v2);
7628 v3 = build_simple_mem_ref (v3);
7629 outgoing = build_simple_mem_ref (outgoing);
7631 if (!TREE_CONSTANT (incoming))
7632 incoming = build_simple_mem_ref (incoming);
7634 else
7635 /* Note that 'var' might be a mem ref. */
7636 v1 = v2 = v3 = var;
7638 /* Determine the position in the reduction buffer, which may be
7639 used by the target. The parser has ensured that this is not a
7640 variable-sized type. */
7641 fixed_size_mode mode
7642 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7643 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7644 offset = (offset + align - 1) & ~(align - 1);
7645 tree off = build_int_cst (sizetype, offset);
7646 offset += GET_MODE_SIZE (mode);
7648 if (!init_code)
7650 init_code = build_int_cst (integer_type_node,
7651 IFN_GOACC_REDUCTION_INIT);
7652 fini_code = build_int_cst (integer_type_node,
7653 IFN_GOACC_REDUCTION_FINI);
7654 setup_code = build_int_cst (integer_type_node,
7655 IFN_GOACC_REDUCTION_SETUP);
7656 teardown_code = build_int_cst (integer_type_node,
7657 IFN_GOACC_REDUCTION_TEARDOWN);
7660 tree setup_call
7661 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7662 TREE_TYPE (var), 6, setup_code,
7663 unshare_expr (ref_to_res),
7664 unshare_expr (incoming),
7665 level, op, off);
7666 tree init_call
7667 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7668 TREE_TYPE (var), 6, init_code,
7669 unshare_expr (ref_to_res),
7670 unshare_expr (v1), level, op, off);
7671 tree fini_call
7672 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7673 TREE_TYPE (var), 6, fini_code,
7674 unshare_expr (ref_to_res),
7675 unshare_expr (v2), level, op, off);
7676 tree teardown_call
7677 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7678 TREE_TYPE (var), 6, teardown_code,
7679 ref_to_res, unshare_expr (v3),
7680 level, op, off);
7682 gimplify_assign (unshare_expr (v1), setup_call, &before_fork);
7683 gimplify_assign (unshare_expr (v2), init_call, &after_fork);
7684 gimplify_assign (unshare_expr (v3), fini_call, &before_join);
7685 gimplify_assign (unshare_expr (outgoing), teardown_call, &after_join);
7688 /* Now stitch things together. */
7689 gimple_seq_add_seq (fork_seq, before_fork);
7690 if (private_marker)
7691 gimple_seq_add_stmt (fork_seq, private_marker);
7692 if (fork)
7693 gimple_seq_add_stmt (fork_seq, fork);
7694 gimple_seq_add_seq (fork_seq, after_fork);
7696 gimple_seq_add_seq (join_seq, before_join);
7697 if (join)
7698 gimple_seq_add_stmt (join_seq, join);
7699 gimple_seq_add_seq (join_seq, after_join);
7702 /* Generate code to implement the REDUCTION clauses and append it
7703 to STMT_SEQP. CLIST, if non-NULL, is a pointer to a sequence
7704 that should also be emitted inside of the critical section;
7705 in that case clear *CLIST afterwards, otherwise leave it as is
7706 and let the caller emit it itself. */
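/* A sketch of the two strategies chosen below (an editor's
   illustration, not from the sources): for a single scalar clause such
   as 'reduction (+:sum)' the merge of each thread's private copy is
   emitted as a relaxed OMP_ATOMIC update, roughly

     #pragma omp atomic update
     sum = sum + sum_private;

   while for two or more clauses, array sections or user-defined
   reductions the merges are instead bracketed by calls to
   GOMP_atomic_start () and GOMP_atomic_end ().  */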
7708 static void
7709 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7710 gimple_seq *clist, omp_context *ctx)
7712 gimple_seq sub_seq = NULL;
7713 gimple *stmt;
7714 tree x, c;
7715 int count = 0;
7717 /* OpenACC loop reductions are handled elsewhere. */
7718 if (is_gimple_omp_oacc (ctx->stmt))
7719 return;
7721 /* SIMD reductions are handled in lower_rec_input_clauses. */
7722 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7723 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7724 return;
7726 /* inscan reductions are handled elsewhere. */
7727 if (ctx->scan_inclusive || ctx->scan_exclusive)
7728 return;
7730 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7731 update in that case, otherwise use a lock. */
7732 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7733 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7734 && !OMP_CLAUSE_REDUCTION_TASK (c))
7736 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7737 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7739 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7740 count = -1;
7741 break;
7743 count++;
7746 if (count == 0)
7747 return;
7749 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7751 tree var, ref, new_var, orig_var;
7752 enum tree_code code;
7753 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7755 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7756 || OMP_CLAUSE_REDUCTION_TASK (c))
7757 continue;
7759 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7760 orig_var = var = OMP_CLAUSE_DECL (c);
7761 if (TREE_CODE (var) == MEM_REF)
7763 var = TREE_OPERAND (var, 0);
7764 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7765 var = TREE_OPERAND (var, 0);
7766 if (TREE_CODE (var) == ADDR_EXPR)
7767 var = TREE_OPERAND (var, 0);
7768 else
7770 /* If this is a pointer- or reference-based array
7771 section, the var could be private in the outer
7772 context, e.g. on an orphaned loop construct. Pretend this
7773 is the private variable's outer reference. */
7774 ccode = OMP_CLAUSE_PRIVATE;
7775 if (TREE_CODE (var) == INDIRECT_REF)
7776 var = TREE_OPERAND (var, 0);
7778 orig_var = var;
7779 if (is_variable_sized (var))
7781 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7782 var = DECL_VALUE_EXPR (var);
7783 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7784 var = TREE_OPERAND (var, 0);
7785 gcc_assert (DECL_P (var));
7788 new_var = lookup_decl (var, ctx);
7789 if (var == OMP_CLAUSE_DECL (c)
7790 && omp_privatize_by_reference (var))
7791 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7792 ref = build_outer_var_ref (var, ctx, ccode);
7793 code = OMP_CLAUSE_REDUCTION_CODE (c);
7795 /* reduction(-:var) sums up the partial results, so it acts
7796 identically to reduction(+:var). */
7797 if (code == MINUS_EXPR)
7798 code = PLUS_EXPR;
7800 bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
7801 if (count == 1)
7803 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7805 addr = save_expr (addr);
7806 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7807 tree new_var2 = new_var;
7808 tree ref2 = ref;
7809 if (is_truth_op)
7811 tree zero = build_zero_cst (TREE_TYPE (new_var));
7812 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7813 boolean_type_node, new_var, zero);
7814 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7815 ref, zero);
7817 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7818 new_var2);
7819 if (is_truth_op)
7820 x = fold_convert (TREE_TYPE (new_var), x);
7821 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7822 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7823 gimplify_and_add (x, stmt_seqp);
7824 return;
7826 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7828 tree d = OMP_CLAUSE_DECL (c);
7829 tree type = TREE_TYPE (d);
7830 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7831 tree i = create_tmp_var (TREE_TYPE (v));
7832 tree ptype = build_pointer_type (TREE_TYPE (type));
7833 tree bias = TREE_OPERAND (d, 1);
7834 d = TREE_OPERAND (d, 0);
7835 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7837 tree b = TREE_OPERAND (d, 1);
7838 b = maybe_lookup_decl (b, ctx);
7839 if (b == NULL)
7841 b = TREE_OPERAND (d, 1);
7842 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7844 if (integer_zerop (bias))
7845 bias = b;
7846 else
7848 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7849 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7850 TREE_TYPE (b), b, bias);
7852 d = TREE_OPERAND (d, 0);
7854 /* For ref, build_outer_var_ref already performs this, so
7855 only new_var needs a dereference. */
7856 if (TREE_CODE (d) == INDIRECT_REF)
7858 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7859 gcc_assert (omp_privatize_by_reference (var)
7860 && var == orig_var);
7862 else if (TREE_CODE (d) == ADDR_EXPR)
7864 if (orig_var == var)
7866 new_var = build_fold_addr_expr (new_var);
7867 ref = build_fold_addr_expr (ref);
7870 else
7872 gcc_assert (orig_var == var);
7873 if (omp_privatize_by_reference (var))
7874 ref = build_fold_addr_expr (ref);
7876 if (DECL_P (v))
7878 tree t = maybe_lookup_decl (v, ctx);
7879 if (t)
7880 v = t;
7881 else
7882 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7883 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7885 if (!integer_zerop (bias))
7887 bias = fold_convert_loc (clause_loc, sizetype, bias);
7888 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7889 TREE_TYPE (new_var), new_var,
7890 unshare_expr (bias));
7891 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7892 TREE_TYPE (ref), ref, bias);
7894 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7895 ref = fold_convert_loc (clause_loc, ptype, ref);
7896 tree m = create_tmp_var (ptype);
7897 gimplify_assign (m, new_var, stmt_seqp);
7898 new_var = m;
7899 m = create_tmp_var (ptype);
7900 gimplify_assign (m, ref, stmt_seqp);
7901 ref = m;
7902 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7903 tree body = create_artificial_label (UNKNOWN_LOCATION);
7904 tree end = create_artificial_label (UNKNOWN_LOCATION);
7905 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7906 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7907 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7908 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7910 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7911 tree decl_placeholder
7912 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7913 SET_DECL_VALUE_EXPR (placeholder, out);
7914 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7915 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7916 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7917 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7918 gimple_seq_add_seq (&sub_seq,
7919 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7920 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7921 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7922 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7924 else
7926 tree out2 = out;
7927 tree priv2 = priv;
7928 if (is_truth_op)
7930 tree zero = build_zero_cst (TREE_TYPE (out));
7931 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7932 boolean_type_node, out, zero);
7933 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7934 boolean_type_node, priv, zero);
7936 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7937 if (is_truth_op)
7938 x = fold_convert (TREE_TYPE (out), x);
7939 out = unshare_expr (out);
7940 gimplify_assign (out, x, &sub_seq);
7942 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7943 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7944 gimple_seq_add_stmt (&sub_seq, g);
7945 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7946 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7947 gimple_seq_add_stmt (&sub_seq, g);
7948 g = gimple_build_assign (i, PLUS_EXPR, i,
7949 build_int_cst (TREE_TYPE (i), 1));
7950 gimple_seq_add_stmt (&sub_seq, g);
7951 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7952 gimple_seq_add_stmt (&sub_seq, g);
7953 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7955 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7957 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7959 if (omp_privatize_by_reference (var)
7960 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7961 TREE_TYPE (ref)))
7962 ref = build_fold_addr_expr_loc (clause_loc, ref);
7963 SET_DECL_VALUE_EXPR (placeholder, ref);
7964 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7965 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7966 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7967 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7968 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7970 else
7972 tree new_var2 = new_var;
7973 tree ref2 = ref;
7974 if (is_truth_op)
7976 tree zero = build_zero_cst (TREE_TYPE (new_var));
7977 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7978 boolean_type_node, new_var, zero);
7979 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7980 ref, zero);
7982 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7983 if (is_truth_op)
7984 x = fold_convert (TREE_TYPE (new_var), x);
7985 ref = build_outer_var_ref (var, ctx);
7986 gimplify_assign (ref, x, &sub_seq);
7990 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7991 0);
7992 gimple_seq_add_stmt (stmt_seqp, stmt);
7994 gimple_seq_add_seq (stmt_seqp, sub_seq);
7996 if (clist)
7998 gimple_seq_add_seq (stmt_seqp, *clist);
7999 *clist = NULL;
8002 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
8003 0);
8004 gimple_seq_add_stmt (stmt_seqp, stmt);
8008 /* Generate code to implement the COPYPRIVATE clauses. */
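/* For example (an editor's sketch): for
     #pragma omp single copyprivate (a)
   the thread that executed the single region stores A into the sender
   record in SLIST,
     .omp_copy_o.a = a;
   and every other thread copies it back out of the received pointer in
   RLIST,
     a = .omp_copy_i->a;
   with an extra level of indirection when A is privatized by
   reference.  */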
8010 static void
8011 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
8012 omp_context *ctx)
8014 tree c;
8016 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8018 tree var, new_var, ref, x;
8019 bool by_ref;
8020 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8022 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
8023 continue;
8025 var = OMP_CLAUSE_DECL (c);
8026 by_ref = use_pointer_for_field (var, NULL);
8028 ref = build_sender_ref (var, ctx);
8029 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
8030 if (by_ref)
8032 x = build_fold_addr_expr_loc (clause_loc, new_var);
8033 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
8035 gimplify_assign (ref, x, slist);
8037 ref = build_receiver_ref (var, false, ctx);
8038 if (by_ref)
8040 ref = fold_convert_loc (clause_loc,
8041 build_pointer_type (TREE_TYPE (new_var)),
8042 ref);
8043 ref = build_fold_indirect_ref_loc (clause_loc, ref);
8045 if (omp_privatize_by_reference (var))
8047 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
8048 ref = build_simple_mem_ref_loc (clause_loc, ref);
8049 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
8051 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
8052 gimplify_and_add (x, rlist);
8057 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
8058 and REDUCTION from the sender (aka parent) side. */
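/* Illustration only (not from the sources): for
     #pragma omp task firstprivate (x) shared (y)
   this emits on the parent side, into ILIST,
     .omp_data_o.x = x;
     .omp_data_o.y = &y;
   assuming the usual '.omp_data_o' sender decl name, and for clauses
   with copy-out semantics (e.g. a by-value lastprivate) reads the
   result back out of the sender record into OLIST after the region.  */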
8060 static void
8061 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
8062 omp_context *ctx)
8064 tree c, t;
8065 int ignored_looptemp = 0;
8066 bool is_taskloop = false;
8068 /* For taskloop, ignore the first two _looptemp_ clauses; those are
8069 initialized by GOMP_taskloop. */
8070 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
8072 ignored_looptemp = 2;
8073 is_taskloop = true;
8076 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8078 tree val, ref, x, var;
8079 bool by_ref, do_in = false, do_out = false;
8080 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8082 switch (OMP_CLAUSE_CODE (c))
8084 case OMP_CLAUSE_PRIVATE:
8085 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
8086 break;
8087 continue;
8088 case OMP_CLAUSE_FIRSTPRIVATE:
8089 case OMP_CLAUSE_COPYIN:
8090 case OMP_CLAUSE_LASTPRIVATE:
8091 case OMP_CLAUSE_IN_REDUCTION:
8092 case OMP_CLAUSE__REDUCTEMP_:
8093 break;
8094 case OMP_CLAUSE_REDUCTION:
8095 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
8096 continue;
8097 break;
8098 case OMP_CLAUSE_SHARED:
8099 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8100 break;
8101 continue;
8102 case OMP_CLAUSE__LOOPTEMP_:
8103 if (ignored_looptemp)
8105 ignored_looptemp--;
8106 continue;
8108 break;
8109 default:
8110 continue;
8113 val = OMP_CLAUSE_DECL (c);
8114 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8115 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
8116 && TREE_CODE (val) == MEM_REF)
8118 val = TREE_OPERAND (val, 0);
8119 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
8120 val = TREE_OPERAND (val, 0);
8121 if (TREE_CODE (val) == INDIRECT_REF
8122 || TREE_CODE (val) == ADDR_EXPR)
8123 val = TREE_OPERAND (val, 0);
8124 if (is_variable_sized (val))
8125 continue;
8128 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
8129 outer taskloop region. */
8130 omp_context *ctx_for_o = ctx;
8131 if (is_taskloop
8132 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8133 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8134 ctx_for_o = ctx->outer;
8136 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
8138 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
8139 && is_global_var (var)
8140 && (val == OMP_CLAUSE_DECL (c)
8141 || !is_task_ctx (ctx)
8142 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
8143 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
8144 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
8145 != POINTER_TYPE)))))
8146 continue;
8148 t = omp_member_access_dummy_var (var);
8149 if (t)
8151 var = DECL_VALUE_EXPR (var);
8152 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
8153 if (o != t)
8154 var = unshare_and_remap (var, t, o);
8155 else
8156 var = unshare_expr (var);
8159 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
8161 /* Handle taskloop firstprivate/lastprivate, where the
8162 lastprivate on GIMPLE_OMP_TASK is represented as
8163 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8164 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
8165 x = omp_build_component_ref (ctx->sender_decl, f);
8166 if (use_pointer_for_field (val, ctx))
8167 var = build_fold_addr_expr (var);
8168 gimplify_assign (x, var, ilist);
8169 DECL_ABSTRACT_ORIGIN (f) = NULL;
8170 continue;
8173 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8174 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
8175 || val == OMP_CLAUSE_DECL (c))
8176 && is_variable_sized (val))
8177 continue;
8178 by_ref = use_pointer_for_field (val, NULL);
8180 switch (OMP_CLAUSE_CODE (c))
8182 case OMP_CLAUSE_FIRSTPRIVATE:
8183 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
8184 && !by_ref
8185 && is_task_ctx (ctx))
8186 suppress_warning (var);
8187 do_in = true;
8188 break;
8190 case OMP_CLAUSE_PRIVATE:
8191 case OMP_CLAUSE_COPYIN:
8192 case OMP_CLAUSE__LOOPTEMP_:
8193 case OMP_CLAUSE__REDUCTEMP_:
8194 do_in = true;
8195 break;
8197 case OMP_CLAUSE_LASTPRIVATE:
8198 if (by_ref || omp_privatize_by_reference (val))
8200 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8201 continue;
8202 do_in = true;
8204 else
8206 do_out = true;
8207 if (lang_hooks.decls.omp_private_outer_ref (val))
8208 do_in = true;
8210 break;
8212 case OMP_CLAUSE_REDUCTION:
8213 case OMP_CLAUSE_IN_REDUCTION:
8214 do_in = true;
8215 if (val == OMP_CLAUSE_DECL (c))
8217 if (is_task_ctx (ctx))
8218 by_ref = use_pointer_for_field (val, ctx);
8219 else
8220 do_out = !(by_ref || omp_privatize_by_reference (val));
8222 else
8223 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
8224 break;
8226 default:
8227 gcc_unreachable ();
8230 if (do_in)
8232 ref = build_sender_ref (val, ctx);
8233 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
8234 gimplify_assign (ref, x, ilist);
8235 if (is_task_ctx (ctx))
8236 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
8239 if (do_out)
8241 ref = build_sender_ref (val, ctx);
8242 gimplify_assign (var, ref, olist);
8247 /* Generate code to implement SHARED from the sender (aka parent)
8248 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8249 list things that got automatically shared. */
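/* Sketch (editor's illustration): for an automatically shared variable
   V that was remapped in the child function, this emits

     .omp_data_o.v = v;      /* or = &v when passed by reference */

   into ILIST before the region and, unless V is read-only or a
   by-reference RESULT/PARM decl,

     v = .omp_data_o.v;

   into OLIST after it.  */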
8251 static void
8252 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
8254 tree var, ovar, nvar, t, f, x, record_type;
8256 if (ctx->record_type == NULL)
8257 return;
8259 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
8260 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8262 ovar = DECL_ABSTRACT_ORIGIN (f);
8263 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
8264 continue;
8266 nvar = maybe_lookup_decl (ovar, ctx);
8267 if (!nvar
8268 || !DECL_HAS_VALUE_EXPR_P (nvar)
8269 || (ctx->allocate_map
8270 && ctx->allocate_map->get (ovar)))
8271 continue;
8273 /* If CTX is a nested parallel directive, find the immediately
8274 enclosing parallel or workshare construct that contains a
8275 mapping for OVAR. */
8276 var = lookup_decl_in_outer_ctx (ovar, ctx);
8278 t = omp_member_access_dummy_var (var);
8279 if (t)
8281 var = DECL_VALUE_EXPR (var);
8282 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
8283 if (o != t)
8284 var = unshare_and_remap (var, t, o);
8285 else
8286 var = unshare_expr (var);
8289 if (use_pointer_for_field (ovar, ctx))
8291 x = build_sender_ref (ovar, ctx);
8292 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
8293 && TREE_TYPE (f) == TREE_TYPE (ovar))
8295 gcc_assert (is_parallel_ctx (ctx)
8296 && DECL_ARTIFICIAL (ovar));
8297 /* _condtemp_ clause. */
8298 var = build_constructor (TREE_TYPE (x), NULL);
8300 else
8301 var = build_fold_addr_expr (var);
8302 gimplify_assign (x, var, ilist);
8304 else
8306 x = build_sender_ref (ovar, ctx);
8307 gimplify_assign (x, var, ilist);
8309 if (!TREE_READONLY (var)
8310 /* We don't need to receive a new reference to a result
8311 or parm decl. In fact we must not store to it, as that would
8312 invalidate any pending RSO and generate wrong gimple
8313 during inlining. */
8314 && !((TREE_CODE (var) == RESULT_DECL
8315 || TREE_CODE (var) == PARM_DECL)
8316 && DECL_BY_REFERENCE (var)))
8318 x = build_sender_ref (ovar, ctx);
8319 gimplify_assign (var, x, olist);
8325 /* Emit an OpenACC head marker call, encapsulating the partitioning and
8326 other information that must be processed by the target compiler.
8327 Return the maximum number of dimensions the associated loop might
8328 be partitioned over. */
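/* An illustrative example (not from the sources): for
     #pragma acc loop gang worker
   inside an OpenACC 'parallel' region, the call built below is roughly

     .data_dep = .UNIQUE (OACC_HEAD_MARK, .data_dep, 2, tag);

   i.e. LEVELS is 2 and TAG has OLF_DIM_GANG and OLF_DIM_WORKER set,
   plus OLF_INDEPENDENT, which is implied inside a parallel region.  */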
8330 static unsigned
8331 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
8332 gimple_seq *seq, omp_context *ctx)
8334 unsigned levels = 0;
8335 unsigned tag = 0;
8336 tree gang_static = NULL_TREE;
8337 auto_vec<tree, 5> args;
8339 args.quick_push (build_int_cst
8340 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
8341 args.quick_push (ddvar);
8342 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8344 switch (OMP_CLAUSE_CODE (c))
8346 case OMP_CLAUSE_GANG:
8347 tag |= OLF_DIM_GANG;
8348 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
8349 /* static:* is represented by -1, and we can ignore it, as
8350 scheduling is always static. */
8351 if (gang_static && integer_minus_onep (gang_static))
8352 gang_static = NULL_TREE;
8353 levels++;
8354 break;
8356 case OMP_CLAUSE_WORKER:
8357 tag |= OLF_DIM_WORKER;
8358 levels++;
8359 break;
8361 case OMP_CLAUSE_VECTOR:
8362 tag |= OLF_DIM_VECTOR;
8363 levels++;
8364 break;
8366 case OMP_CLAUSE_SEQ:
8367 tag |= OLF_SEQ;
8368 break;
8370 case OMP_CLAUSE_AUTO:
8371 tag |= OLF_AUTO;
8372 break;
8374 case OMP_CLAUSE_INDEPENDENT:
8375 tag |= OLF_INDEPENDENT;
8376 break;
8378 case OMP_CLAUSE_TILE:
8379 tag |= OLF_TILE;
8380 break;
8382 case OMP_CLAUSE_REDUCTION:
8383 tag |= OLF_REDUCTION;
8384 break;
8386 default:
8387 continue;
8391 if (gang_static)
8393 if (DECL_P (gang_static))
8394 gang_static = build_outer_var_ref (gang_static, ctx);
8395 tag |= OLF_GANG_STATIC;
8398 omp_context *tgt = enclosing_target_ctx (ctx);
8399 if (!tgt || is_oacc_parallel_or_serial (tgt))
8400 ;
8401 else if (is_oacc_kernels (tgt))
8402 /* This loop handling is not used inside OpenACC 'kernels' regions. */
8403 gcc_unreachable ();
8404 else if (is_oacc_kernels_decomposed_part (tgt))
8405 ;
8406 else
8407 gcc_unreachable ();
8409 /* In a parallel region, loops are implicitly INDEPENDENT. */
8410 if (!tgt || is_oacc_parallel_or_serial (tgt))
8411 tag |= OLF_INDEPENDENT;
8413 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8414 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8415 if (tgt && is_oacc_kernels_decomposed_part (tgt))
8417 gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
8418 gcc_assert (!(tag & OLF_AUTO));
8421 if (tag & OLF_TILE)
8422 /* Tiling could use all 3 levels. */
8423 levels = 3;
8424 else
8426 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8427 Ensure at least one level, or 2 for possible auto
8428 partitioning. */
8429 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
8430 << OLF_DIM_BASE) | OLF_SEQ));
8432 if (levels < 1u + maybe_auto)
8433 levels = 1u + maybe_auto;
8436 args.quick_push (build_int_cst (integer_type_node, levels));
8437 args.quick_push (build_int_cst (integer_type_node, tag));
8438 if (gang_static)
8439 args.quick_push (gang_static);
8441 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
8442 gimple_set_location (call, loc);
8443 gimple_set_lhs (call, ddvar);
8444 gimple_seq_add_stmt (seq, call);
8446 return levels;
8449 /* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
8450 partitioning level of the enclosed region. */
8452 static void
8453 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8454 tree tofollow, gimple_seq *seq)
8456 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8457 : IFN_UNIQUE_OACC_TAIL_MARK);
8458 tree marker = build_int_cst (integer_type_node, marker_kind);
8459 int nargs = 2 + (tofollow != NULL_TREE);
8460 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8461 marker, ddvar, tofollow);
8462 gimple_set_location (call, loc);
8463 gimple_set_lhs (call, ddvar);
8464 gimple_seq_add_stmt (seq, call);
8467 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8468 the loop clauses, from which we extract reductions. Initialize
8469 HEAD and TAIL. */
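/* Schematically (an editor's sketch, not from the sources), for a
   two-level loop the generated structure is

     HEAD:                          TAIL:
       outer reduction setups         inner reduction finis
       .UNIQUE (OACC_FORK)            .UNIQUE (OACC_JOIN)
       outer reduction inits          inner reduction teardowns
       inner reduction setups         outer reduction finis
       .UNIQUE (OACC_FORK)            .UNIQUE (OACC_JOIN)
       inner reduction inits          outer reduction teardowns

   bracketed by head/tail markers; which compute axis each level maps
   to is only decided later, during device lowering.  */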
8471 static void
8472 lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
8473 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
8475 bool inner = false;
8476 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
8477 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
8479 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
8481 if (private_marker)
8483 gimple_set_location (private_marker, loc);
8484 gimple_call_set_lhs (private_marker, ddvar);
8485 gimple_call_set_arg (private_marker, 1, ddvar);
8488 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
8489 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
8491 gcc_assert (count);
8492 for (unsigned done = 1; count; count--, done++)
8494 gimple_seq fork_seq = NULL;
8495 gimple_seq join_seq = NULL;
8497 tree place = build_int_cst (integer_type_node, -1);
8498 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
8499 fork_kind, ddvar, place);
8500 gimple_set_location (fork, loc);
8501 gimple_set_lhs (fork, ddvar);
8503 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
8504 join_kind, ddvar, place);
8505 gimple_set_location (join, loc);
8506 gimple_set_lhs (join, ddvar);
8508 /* Mark the beginning of this level sequence. */
8509 if (inner)
8510 lower_oacc_loop_marker (loc, ddvar, true,
8511 build_int_cst (integer_type_node, count),
8512 &fork_seq);
8513 lower_oacc_loop_marker (loc, ddvar, false,
8514 build_int_cst (integer_type_node, done),
8515 &join_seq);
8517 lower_oacc_reductions (loc, clauses, place, inner,
8518 fork, (count == 1) ? private_marker : NULL,
8519 join, &fork_seq, &join_seq, ctx);
8521 /* Append this level to head. */
8522 gimple_seq_add_seq (head, fork_seq);
8523 /* Prepend it to tail. */
8524 gimple_seq_add_seq (&join_seq, *tail);
8525 *tail = join_seq;
8527 inner = true;
8530 /* Mark the end of the sequence. */
8531 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
8532 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
8535 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8536 catch handler and return it. This prevents programs from violating the
8537 structured block semantics with throws. */
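/* Sketch of the wrapping (illustration only): the returned sequence is
   a GIMPLE_TRY_CATCH whose handler is a GIMPLE_EH_MUST_NOT_THROW,
   conceptually

     try
       {
         BODY;
       }
     catch (...)
       <MUST_NOT_THROW: terminate () or __builtin_trap ()>

   so an exception escaping BODY aborts the program instead of leaving
   the structured block.  */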
8539 static gimple_seq
8540 maybe_catch_exception (gimple_seq body)
8542 gimple *g;
8543 tree decl;
8545 if (!flag_exceptions)
8546 return body;
8548 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8549 decl = lang_hooks.eh_protect_cleanup_actions ();
8550 else
8551 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8553 g = gimple_build_eh_must_not_throw (decl);
8554 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8555 GIMPLE_TRY_CATCH);
8557 return gimple_seq_alloc_with_stmt (g);
8561 /* Routines to lower OMP directives into OMP-GIMPLE. */
8563 /* If ctx is a worksharing context inside of a cancellable parallel
8564 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8565 and conditional branch to parallel's cancel_label to handle
8566 cancellation in the implicit barrier. */
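/* A sketch of the emitted check (names illustrative): the implicit
   barrier's return value is turned into a condition,

     <lhs> = GIMPLE_OMP_RETURN;   /* barrier; returns the cancel flag */
     if (<lhs> != 0) goto <cancel_label>; else goto <fallthru>;
     <fallthru>:

   so a thread observing a cancelled parallel branches from the
   implicit barrier straight to the parallel's cancellation label.  */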
8568 static void
8569 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8570 gimple_seq *body)
8572 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8573 if (gimple_omp_return_nowait_p (omp_return))
8574 return;
8575 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8576 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8577 && outer->cancellable)
8579 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8580 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8581 tree lhs = create_tmp_var (c_bool_type);
8582 gimple_omp_return_set_lhs (omp_return, lhs);
8583 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8584 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8585 fold_convert (c_bool_type,
8586 boolean_false_node),
8587 outer->cancel_label, fallthru_label);
8588 gimple_seq_add_stmt (body, g);
8589 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8591 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
8592 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
8593 return;
8596 /* Find the first task_reduction or reduction clause or return NULL
8597 if there are none. */
8599 static inline tree
8600 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8601 enum omp_clause_code ccode)
8603 while (1)
8605 clauses = omp_find_clause (clauses, ccode);
8606 if (clauses == NULL_TREE)
8607 return NULL_TREE;
8608 if (ccode != OMP_CLAUSE_REDUCTION
8609 || code == OMP_TASKLOOP
8610 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8611 return clauses;
8612 clauses = OMP_CLAUSE_CHAIN (clauses);
8616 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8617 gimple_seq *, gimple_seq *);
8619 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8620 CTX is the enclosing OMP context for the current statement. */
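/* After lowering, the bind that replaces the directive contains,
   schematically (an editor's sketch):

     <ILIST: privatization and reduction setup>
     GIMPLE_OMP_SECTIONS <.section control variable>
     GIMPLE_OMP_SECTIONS_SWITCH
     <bind with the lowered section bodies; lastprivate handling is
      appended to the last section>
     GIMPLE_OMP_CONTINUE
     <OLIST: reduction merges>
     <DLIST: destructors>
     GIMPLE_OMP_RETURN [nowait]

   as assembled towards the end of this function.  */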
8622 static void
8623 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8625 tree block, control;
8626 gimple_stmt_iterator tgsi;
8627 gomp_sections *stmt;
8628 gimple *t;
8629 gbind *new_stmt, *bind;
8630 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8632 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
8634 push_gimplify_context ();
8636 dlist = NULL;
8637 ilist = NULL;
8639 tree rclauses
8640 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
8641 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
8642 tree rtmp = NULL_TREE;
8643 if (rclauses)
8645 tree type = build_pointer_type (pointer_sized_int_node);
8646 tree temp = create_tmp_var (type);
8647 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8648 OMP_CLAUSE_DECL (c) = temp;
8649 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
8650 gimple_omp_sections_set_clauses (stmt, c);
8651 lower_omp_task_reductions (ctx, OMP_SECTIONS,
8652 gimple_omp_sections_clauses (stmt),
8653 &ilist, &tred_dlist);
8654 rclauses = c;
8655 rtmp = make_ssa_name (type);
8656 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
8659 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
8660 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
8662 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
8663 &ilist, &dlist, ctx, NULL);
8665 control = create_tmp_var (unsigned_type_node, ".section");
8666 gimple_omp_sections_set_control (stmt, control);
8668 new_body = gimple_omp_body (stmt);
8669 gimple_omp_set_body (stmt, NULL);
8670 tgsi = gsi_start (new_body);
8671 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
8673 omp_context *sctx;
8674 gimple *sec_start;
8676 sec_start = gsi_stmt (tgsi);
8677 sctx = maybe_lookup_ctx (sec_start);
8678 gcc_assert (sctx);
8680 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8681 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8682 GSI_CONTINUE_LINKING);
8683 gimple_omp_set_body (sec_start, NULL);
8685 if (gsi_one_before_end_p (tgsi))
8687 gimple_seq l = NULL;
8688 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8689 &ilist, &l, &clist, ctx);
8690 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8691 gimple_omp_section_set_last (sec_start);
8694 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8695 GSI_CONTINUE_LINKING);
8698 block = make_node (BLOCK);
8699 bind = gimple_build_bind (NULL, new_body, block);
8701 olist = NULL;
8702 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8703 &clist, ctx);
8704 if (clist)
8706 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8707 gcall *g = gimple_build_call (fndecl, 0);
8708 gimple_seq_add_stmt (&olist, g);
8709 gimple_seq_add_seq (&olist, clist);
8710 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8711 g = gimple_build_call (fndecl, 0);
8712 gimple_seq_add_stmt (&olist, g);
8715 block = make_node (BLOCK);
8716 new_stmt = gimple_build_bind (NULL, NULL, block);
8717 gsi_replace (gsi_p, new_stmt, true);
8719 pop_gimplify_context (new_stmt);
8720 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8721 BLOCK_VARS (block) = gimple_bind_vars (bind);
8722 if (BLOCK_VARS (block))
8723 TREE_USED (block) = 1;
8725 new_body = NULL;
8726 gimple_seq_add_seq (&new_body, ilist);
8727 gimple_seq_add_stmt (&new_body, stmt);
8728 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8729 gimple_seq_add_stmt (&new_body, bind);
8731 t = gimple_build_omp_continue (control, control);
8732 gimple_seq_add_stmt (&new_body, t);
8734 gimple_seq_add_seq (&new_body, olist);
8735 if (ctx->cancellable)
8736 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8737 gimple_seq_add_seq (&new_body, dlist);
8739 new_body = maybe_catch_exception (new_body);
8741 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8742 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8743 t = gimple_build_omp_return (nowait);
8744 gimple_seq_add_stmt (&new_body, t);
8745 gimple_seq_add_seq (&new_body, tred_dlist);
8746 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
8748 if (rclauses)
8749 OMP_CLAUSE_DECL (rclauses) = rtmp;
8751 gimple_bind_set_body (new_stmt, new_body);
8755 /* A subroutine of lower_omp_single. Expand the simple form of
8756 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8758 if (GOMP_single_start ())
8759 BODY;
8760 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8762 FIXME. It may be better to delay expanding the logic of this until
8763 pass_expand_omp. The expanded logic may make the job more difficult
8764 for a synchronization analysis pass. */
8766 static void
8767 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8769 location_t loc = gimple_location (single_stmt);
8770 tree tlabel = create_artificial_label (loc);
8771 tree flabel = create_artificial_label (loc);
8772 gimple *call, *cond;
8773 tree lhs, decl;
8775 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8776 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8777 call = gimple_build_call (decl, 0);
8778 gimple_call_set_lhs (call, lhs);
8779 gimple_seq_add_stmt (pre_p, call);
8781 cond = gimple_build_cond (EQ_EXPR, lhs,
8782 fold_convert_loc (loc, TREE_TYPE (lhs),
8783 boolean_true_node),
8784 tlabel, flabel);
8785 gimple_seq_add_stmt (pre_p, cond);
8786 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8787 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8788 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8792 /* A subroutine of lower_omp_single. Expand the simple form of
8793 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8795 #pragma omp single copyprivate (a, b, c)
8797 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8800 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8802 BODY;
8803 copyout.a = a;
8804 copyout.b = b;
8805 copyout.c = c;
8806 GOMP_single_copy_end (&copyout);
8808 else
8810 a = copyout_p->a;
8811 b = copyout_p->b;
8812 c = copyout_p->c;
8814 GOMP_barrier ();
8817 FIXME. It may be better to delay expanding the logic of this until
8818 pass_expand_omp. The expanded logic may make the job more difficult
8819 for a synchronization analysis pass. */
8821 static void
8822 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8823 omp_context *ctx)
8825 tree ptr_type, t, l0, l1, l2, bfn_decl;
8826 gimple_seq copyin_seq;
8827 location_t loc = gimple_location (single_stmt);
8829 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8831 ptr_type = build_pointer_type (ctx->record_type);
8832 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8834 l0 = create_artificial_label (loc);
8835 l1 = create_artificial_label (loc);
8836 l2 = create_artificial_label (loc);
8838 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8839 t = build_call_expr_loc (loc, bfn_decl, 0);
8840 t = fold_convert_loc (loc, ptr_type, t);
8841 gimplify_assign (ctx->receiver_decl, t, pre_p);
8843 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8844 build_int_cst (ptr_type, 0));
8845 t = build3 (COND_EXPR, void_type_node, t,
8846 build_and_jump (&l0), build_and_jump (&l1));
8847 gimplify_and_add (t, pre_p);
8849 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8851 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8853 copyin_seq = NULL;
8854 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8855 &copyin_seq, ctx);
8857 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8858 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8859 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8860 gimplify_and_add (t, pre_p);
8862 t = build_and_jump (&l2);
8863 gimplify_and_add (t, pre_p);
8865 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8867 gimple_seq_add_seq (pre_p, copyin_seq);
8869 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8873 /* Expand code for an OpenMP single directive. */
8875 static void
8876 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8878 tree block;
8879 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8880 gbind *bind;
8881 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8883 push_gimplify_context ();
8885 block = make_node (BLOCK);
8886 bind = gimple_build_bind (NULL, NULL, block);
8887 gsi_replace (gsi_p, bind, true);
8888 bind_body = NULL;
8889 dlist = NULL;
8890 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8891 &bind_body, &dlist, ctx, NULL);
8892 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8894 gimple_seq_add_stmt (&bind_body, single_stmt);
8896 if (ctx->record_type)
8897 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8898 else
8899 lower_omp_single_simple (single_stmt, &bind_body);
8901 gimple_omp_set_body (single_stmt, NULL);
8903 gimple_seq_add_seq (&bind_body, dlist);
8905 bind_body = maybe_catch_exception (bind_body);
8907 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8908 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8909 gimple *g = gimple_build_omp_return (nowait);
8910 gimple_seq_add_stmt (&bind_body_tail, g);
8911 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8912 if (ctx->record_type)
8914 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8915 tree clobber = build_clobber (ctx->record_type);
8916 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8917 clobber), GSI_SAME_STMT);
8919 gimple_seq_add_seq (&bind_body, bind_body_tail);
8920 gimple_bind_set_body (bind, bind_body);
8922 pop_gimplify_context (bind);
8924 gimple_bind_append_vars (bind, ctx->block_vars);
8925 BLOCK_VARS (block) = ctx->block_vars;
8926 if (BLOCK_VARS (block))
8927 TREE_USED (block) = 1;
8931 /* Lower code for an OMP scope directive. */
8933 static void
8934 lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8936 tree block;
8937 gimple *scope_stmt = gsi_stmt (*gsi_p);
8938 gbind *bind;
8939 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8940 gimple_seq tred_dlist = NULL;
8942 push_gimplify_context ();
8944 block = make_node (BLOCK);
8945 bind = gimple_build_bind (NULL, NULL, block);
8946 gsi_replace (gsi_p, bind, true);
8947 bind_body = NULL;
8948 dlist = NULL;
8950 tree rclauses
8951 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
8952 OMP_SCOPE, OMP_CLAUSE_REDUCTION);
8953 if (rclauses)
8955 tree type = build_pointer_type (pointer_sized_int_node);
8956 tree temp = create_tmp_var (type);
8957 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8958 OMP_CLAUSE_DECL (c) = temp;
8959 OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
8960 gimple_omp_scope_set_clauses (scope_stmt, c);
8961 lower_omp_task_reductions (ctx, OMP_SCOPE,
8962 gimple_omp_scope_clauses (scope_stmt),
8963 &bind_body, &tred_dlist);
8964 rclauses = c;
8965 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
8966 gimple *stmt = gimple_build_call (fndecl, 1, temp);
8967 gimple_seq_add_stmt (&bind_body, stmt);
8970 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
8971 &bind_body, &dlist, ctx, NULL);
8972 lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);
8974 gimple_seq_add_stmt (&bind_body, scope_stmt);
8976 gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));
8978 gimple_omp_set_body (scope_stmt, NULL);
8980 gimple_seq clist = NULL;
8981 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
8982 &bind_body, &clist, ctx);
8983 if (clist)
8985 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8986 gcall *g = gimple_build_call (fndecl, 0);
8987 gimple_seq_add_stmt (&bind_body, g);
8988 gimple_seq_add_seq (&bind_body, clist);
8989 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8990 g = gimple_build_call (fndecl, 0);
8991 gimple_seq_add_stmt (&bind_body, g);
8994 gimple_seq_add_seq (&bind_body, dlist);
8996 bind_body = maybe_catch_exception (bind_body);
8998 bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
8999 OMP_CLAUSE_NOWAIT) != NULL_TREE;
9000 gimple *g = gimple_build_omp_return (nowait);
9001 gimple_seq_add_stmt (&bind_body_tail, g);
9002 gimple_seq_add_seq (&bind_body_tail, tred_dlist);
9003 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
9004 if (ctx->record_type)
9006 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
9007 tree clobber = build_clobber (ctx->record_type);
9008 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
9009 clobber), GSI_SAME_STMT);
9011 gimple_seq_add_seq (&bind_body, bind_body_tail);
9013 gimple_bind_set_body (bind, bind_body);
9015 pop_gimplify_context (bind);
9017 gimple_bind_append_vars (bind, ctx->block_vars);
9018 BLOCK_VARS (block) = ctx->block_vars;
9019 if (BLOCK_VARS (block))
9020 TREE_USED (block) = 1;
9022 /* Expand code for an OpenMP master or masked directive. */
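/* Sketch of the generated guard (FILTER is 0 for 'master'; for
   'masked' it comes from the filter clause, if any):

     if (omp_get_thread_num () == FILTER)
       {
         BODY;
       }
     <lab>:
     GIMPLE_OMP_RETURN (nowait);
*/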
9024 static void
9025 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9027 tree block, lab = NULL, x, bfn_decl;
9028 gimple *stmt = gsi_stmt (*gsi_p);
9029 gbind *bind;
9030 location_t loc = gimple_location (stmt);
9031 gimple_seq tseq;
9032 tree filter = integer_zero_node;
9034 push_gimplify_context ();
9036 if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
9038 filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
9039 OMP_CLAUSE_FILTER);
9040 if (filter)
9041 filter = fold_convert (integer_type_node,
9042 OMP_CLAUSE_FILTER_EXPR (filter));
9043 else
9044 filter = integer_zero_node;
9046 block = make_node (BLOCK);
9047 bind = gimple_build_bind (NULL, NULL, block);
9048 gsi_replace (gsi_p, bind, true);
9049 gimple_bind_add_stmt (bind, stmt);
9051 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9052 x = build_call_expr_loc (loc, bfn_decl, 0);
9053 x = build2 (EQ_EXPR, boolean_type_node, x, filter);
9054 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
9055 tseq = NULL;
9056 gimplify_and_add (x, &tseq);
9057 gimple_bind_add_seq (bind, tseq);
9059 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9060 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9061 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9062 gimple_omp_set_body (stmt, NULL);
9064 gimple_bind_add_stmt (bind, gimple_build_label (lab));
9066 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9068 pop_gimplify_context (bind);
9070 gimple_bind_append_vars (bind, ctx->block_vars);
9071 BLOCK_VARS (block) = ctx->block_vars;
9074 /* Helper function for lower_omp_task_reductions. For a specific PASS
9075 find the current clause that should be processed, or return false
9076 if all have been processed already. */
9078 static inline bool
9079 omp_task_reduction_iterate (int pass, enum tree_code code,
9080 enum omp_clause_code ccode, tree *c, tree *decl,
9081 tree *type, tree *next)
9083 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
9085 if (ccode == OMP_CLAUSE_REDUCTION
9086 && code != OMP_TASKLOOP
9087 && !OMP_CLAUSE_REDUCTION_TASK (*c))
9088 continue;
9089 *decl = OMP_CLAUSE_DECL (*c);
9090 *type = TREE_TYPE (*decl);
9091 if (TREE_CODE (*decl) == MEM_REF)
9093 if (pass != 1)
9094 continue;
9096 else
9098 if (omp_privatize_by_reference (*decl))
9099 *type = TREE_TYPE (*type);
9100 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
9101 continue;
9103 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
9104 return true;
9106 *decl = NULL_TREE;
9107 *type = NULL_TREE;
9108 *next = NULL_TREE;
9109 return false;
9112 /* Lower task_reduction and reduction clauses (the latter only when CODE is
9113 not OMP_TASKGROUP, and then only those with the task modifier). Register
9114 the mapping of those in START; reduce and unregister them in END. */
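/* Judging from the statements emitted below (the authoritative format
   is documented in libgomp/task.c), the registration array starts out
   as

     avar[0] = <number of reductions>                  (CNT)
     avar[1] = <per-thread chunk size, rounded up to a
                64-byte cache line>
     avar[2] = <alignment, at least the cache-line size>
     avar[3] = -1
     avar[4] = 0

   followed by three slots per reduction clause.  */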
9116 static void
9117 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
9118 gimple_seq *start, gimple_seq *end)
9120 enum omp_clause_code ccode
9121 = (code == OMP_TASKGROUP
9122 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
9123 tree cancellable = NULL_TREE;
9124 clauses = omp_task_reductions_find_first (clauses, code, ccode);
9125 if (clauses == NULL_TREE)
9126 return;
9127 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9129 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
9130 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
9131 && outer->cancellable)
9133 cancellable = error_mark_node;
9134 break;
9136 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
9137 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
9138 break;
9140 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
9141 tree *last = &TYPE_FIELDS (record_type);
9142 unsigned cnt = 0;
9143 if (cancellable)
9145 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9146 ptr_type_node);
9147 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9148 integer_type_node);
9149 *last = field;
9150 DECL_CHAIN (field) = ifield;
9151 last = &DECL_CHAIN (ifield);
9152 DECL_CONTEXT (field) = record_type;
9153 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9154 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9155 DECL_CONTEXT (ifield) = record_type;
9156 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
9157 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
9159 for (int pass = 0; pass < 2; pass++)
9161 tree decl, type, next;
9162 for (tree c = clauses;
9163 omp_task_reduction_iterate (pass, code, ccode,
9164 &c, &decl, &type, &next); c = next)
9166 ++cnt;
9167 tree new_type = type;
9168 if (ctx->outer)
9169 new_type = remap_type (type, &ctx->outer->cb);
9170 tree field
9171 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
9172 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
9173 new_type);
9174 if (DECL_P (decl) && type == TREE_TYPE (decl))
9176 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
9177 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
9178 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
9180 else
9181 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
9182 DECL_CONTEXT (field) = record_type;
9183 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9184 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9185 *last = field;
9186 last = &DECL_CHAIN (field);
9187 tree bfield
9188 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
9189 boolean_type_node);
9190 DECL_CONTEXT (bfield) = record_type;
9191 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
9192 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
9193 *last = bfield;
9194 last = &DECL_CHAIN (bfield);
9197 *last = NULL_TREE;
9198 layout_type (record_type);
9200 /* Build up an array which registers with the runtime all the reductions
9201 and deregisters them at the end. Format documented in libgomp/task.c. */
9202 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
9203 tree avar = create_tmp_var_raw (atype);
9204 gimple_add_tmp_var (avar);
9205 TREE_ADDRESSABLE (avar) = 1;
9206 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
9207 NULL_TREE, NULL_TREE);
9208 tree t = build_int_cst (pointer_sized_int_node, cnt);
9209 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9210 gimple_seq seq = NULL;
9211 tree sz = fold_convert (pointer_sized_int_node,
9212 TYPE_SIZE_UNIT (record_type));
9213 int cachesz = 64;
9214 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
9215 build_int_cst (pointer_sized_int_node, cachesz - 1));
9216 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
9217 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
9218 ctx->task_reductions.create (1 + cnt);
9219 ctx->task_reduction_map = new hash_map<tree, unsigned>;
9220 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
9221 ? sz : NULL_TREE);
9222 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
9223 gimple_seq_add_seq (start, seq);
9224 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
9225 NULL_TREE, NULL_TREE);
9226 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
9227 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9228 NULL_TREE, NULL_TREE);
9229 t = build_int_cst (pointer_sized_int_node,
9230 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
9231 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9232 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
9233 NULL_TREE, NULL_TREE);
9234 t = build_int_cst (pointer_sized_int_node, -1);
9235 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9236 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
9237 NULL_TREE, NULL_TREE);
9238 t = build_int_cst (pointer_sized_int_node, 0);
9239 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9241 /* In the END sequence, build a loop that iterates from 0 to
9242 < omp_get_num_threads () and for each task reduction checks a bool right
9243 after the private variable within that thread's chunk; if the bool is
9244 clear, the variable hasn't been initialized and thus isn't going to be
9245 reduced or destructed; otherwise reduce and destruct it. */
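/* A rough C-level sketch of the loop built below (illustrative only; the
   names are invented here and the real output is GIMPLE):
     for (idx = 0; idx < num_thr_sz; idx++)
       {
	 struct record *ptr = (struct record *) data;
	 if (ptr->initialized_i)	// the bool after the i-th private var
	   {
	     orig_i = orig_i OP ptr->priv_i;	// or the UDR merge
	     ... destruct ptr->priv_i if the clause has a destructor ...
	   }
	 data += sz;			// cache-line aligned record size
       }  */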
9246 tree idx = create_tmp_var (size_type_node);
9247 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
9248 tree num_thr_sz = create_tmp_var (size_type_node);
9249 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9250 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9251 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
9252 gimple *g;
9253 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9255 /* For worksharing constructs or scope, perform the finalization only in
9256 the master thread, with the exception of cancelled implicit barriers,
9257 where only the current thread is handled. */
9258 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9259 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9260 tree thr_num = create_tmp_var (integer_type_node);
9261 g = gimple_build_call (t, 0);
9262 gimple_call_set_lhs (g, thr_num);
9263 gimple_seq_add_stmt (end, g);
9264 if (cancellable)
9266 tree c;
9267 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9268 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9269 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9270 if (code == OMP_FOR)
9271 c = gimple_omp_for_clauses (ctx->stmt);
9272 else if (code == OMP_SECTIONS)
9273 c = gimple_omp_sections_clauses (ctx->stmt);
9274 else /* if (code == OMP_SCOPE) */
9275 c = gimple_omp_scope_clauses (ctx->stmt);
9276 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
9277 cancellable = c;
9278 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
9279 lab5, lab6);
9280 gimple_seq_add_stmt (end, g);
9281 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9282 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
9283 gimple_seq_add_stmt (end, g);
9284 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
9285 build_one_cst (TREE_TYPE (idx)));
9286 gimple_seq_add_stmt (end, g);
9287 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
9288 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9290 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
9291 gimple_seq_add_stmt (end, g);
9292 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9294 if (code != OMP_PARALLEL)
9296 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9297 tree num_thr = create_tmp_var (integer_type_node);
9298 g = gimple_build_call (t, 0);
9299 gimple_call_set_lhs (g, num_thr);
9300 gimple_seq_add_stmt (end, g);
9301 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
9302 gimple_seq_add_stmt (end, g);
9303 if (cancellable)
9304 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9306 else
9308 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
9309 OMP_CLAUSE__REDUCTEMP_);
9310 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
9311 t = fold_convert (size_type_node, t);
9312 gimplify_assign (num_thr_sz, t, end);
9314 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9315 NULL_TREE, NULL_TREE);
9316 tree data = create_tmp_var (pointer_sized_int_node);
9317 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
9318 if (code == OMP_TASKLOOP)
9320 lab7 = create_artificial_label (UNKNOWN_LOCATION);
9321 g = gimple_build_cond (NE_EXPR, data,
9322 build_zero_cst (pointer_sized_int_node),
9323 lab1, lab7);
9324 gimple_seq_add_stmt (end, g);
9326 gimple_seq_add_stmt (end, gimple_build_label (lab1));
9327 tree ptr;
9328 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
9329 ptr = create_tmp_var (build_pointer_type (record_type));
9330 else
9331 ptr = create_tmp_var (ptr_type_node);
9332 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
9334 tree field = TYPE_FIELDS (record_type);
9335 cnt = 0;
9336 if (cancellable)
9337 field = DECL_CHAIN (DECL_CHAIN (field));
9338 for (int pass = 0; pass < 2; pass++)
9340 tree decl, type, next;
9341 for (tree c = clauses;
9342 omp_task_reduction_iterate (pass, code, ccode,
9343 &c, &decl, &type, &next); c = next)
9345 tree var = decl, ref;
9346 if (TREE_CODE (decl) == MEM_REF)
9348 var = TREE_OPERAND (var, 0);
9349 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
9350 var = TREE_OPERAND (var, 0);
9351 tree v = var;
9352 if (TREE_CODE (var) == ADDR_EXPR)
9353 var = TREE_OPERAND (var, 0);
9354 else if (TREE_CODE (var) == INDIRECT_REF)
9355 var = TREE_OPERAND (var, 0);
9356 tree orig_var = var;
9357 if (is_variable_sized (var))
9359 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
9360 var = DECL_VALUE_EXPR (var);
9361 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
9362 var = TREE_OPERAND (var, 0);
9363 gcc_assert (DECL_P (var));
9365 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9366 if (orig_var != var)
9367 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
9368 else if (TREE_CODE (v) == ADDR_EXPR)
9369 t = build_fold_addr_expr (t);
9370 else if (TREE_CODE (v) == INDIRECT_REF)
9371 t = build_fold_indirect_ref (t);
9372 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
9374 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
9375 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
9376 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
9378 if (!integer_zerop (TREE_OPERAND (decl, 1)))
9379 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
9380 fold_convert (size_type_node,
9381 TREE_OPERAND (decl, 1)));
9383 else
9385 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9386 if (!omp_privatize_by_reference (decl))
9387 t = build_fold_addr_expr (t);
9389 t = fold_convert (pointer_sized_int_node, t);
9390 seq = NULL;
9391 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9392 gimple_seq_add_seq (start, seq);
9393 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9394 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9395 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9396 t = unshare_expr (byte_position (field));
9397 t = fold_convert (pointer_sized_int_node, t);
9398 ctx->task_reduction_map->put (c, cnt);
9399 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
9400 ? t : NULL_TREE);
9401 seq = NULL;
9402 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9403 gimple_seq_add_seq (start, seq);
9404 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9405 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
9406 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9408 tree bfield = DECL_CHAIN (field);
9409 tree cond;
9410 if (code == OMP_PARALLEL
9411 || code == OMP_FOR
9412 || code == OMP_SECTIONS
9413 || code == OMP_SCOPE)
9414 /* In parallel, worksharing or scope all threads unconditionally
9415 initialize all their task reduction private variables. */
9416 cond = boolean_true_node;
9417 else if (TREE_TYPE (ptr) == ptr_type_node)
9419 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9420 unshare_expr (byte_position (bfield)));
9421 seq = NULL;
9422 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
9423 gimple_seq_add_seq (end, seq);
9424 tree pbool = build_pointer_type (TREE_TYPE (bfield));
9425 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
9426 build_int_cst (pbool, 0));
9428 else
9429 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
9430 build_simple_mem_ref (ptr), bfield, NULL_TREE);
9431 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9432 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9433 tree condv = create_tmp_var (boolean_type_node);
9434 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
9435 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
9436 lab3, lab4);
9437 gimple_seq_add_stmt (end, g);
9438 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9439 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
9441 /* If this reduction doesn't need destruction and parallel
9442 has been cancelled, there is nothing to do for this
9443 reduction, so jump around the merge operation. */
9444 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9445 g = gimple_build_cond (NE_EXPR, cancellable,
9446 build_zero_cst (TREE_TYPE (cancellable)),
9447 lab4, lab5);
9448 gimple_seq_add_stmt (end, g);
9449 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9452 tree new_var;
9453 if (TREE_TYPE (ptr) == ptr_type_node)
9455 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9456 unshare_expr (byte_position (field)));
9457 seq = NULL;
9458 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9459 gimple_seq_add_seq (end, seq);
9460 tree pbool = build_pointer_type (TREE_TYPE (field));
9461 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9462 build_int_cst (pbool, 0));
9464 else
9465 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9466 build_simple_mem_ref (ptr), field, NULL_TREE);
9468 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9469 if (TREE_CODE (decl) != MEM_REF
9470 && omp_privatize_by_reference (decl))
9471 ref = build_simple_mem_ref (ref);
9472 /* reduction(-:var) sums up the partial results, so it acts
9473 identically to reduction(+:var). */
9474 if (rcode == MINUS_EXPR)
9475 rcode = PLUS_EXPR;
9476 if (TREE_CODE (decl) == MEM_REF)
9478 tree type = TREE_TYPE (new_var);
9479 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9480 tree i = create_tmp_var (TREE_TYPE (v));
9481 tree ptype = build_pointer_type (TREE_TYPE (type));
9482 if (DECL_P (v))
9484 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9485 tree vv = create_tmp_var (TREE_TYPE (v));
9486 gimplify_assign (vv, v, start);
9487 v = vv;
9489 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9490 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9491 new_var = build_fold_addr_expr (new_var);
9492 new_var = fold_convert (ptype, new_var);
9493 ref = fold_convert (ptype, ref);
9494 tree m = create_tmp_var (ptype);
9495 gimplify_assign (m, new_var, end);
9496 new_var = m;
9497 m = create_tmp_var (ptype);
9498 gimplify_assign (m, ref, end);
9499 ref = m;
9500 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9501 tree body = create_artificial_label (UNKNOWN_LOCATION);
9502 tree endl = create_artificial_label (UNKNOWN_LOCATION);
9503 gimple_seq_add_stmt (end, gimple_build_label (body));
9504 tree priv = build_simple_mem_ref (new_var);
9505 tree out = build_simple_mem_ref (ref);
9506 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9508 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9509 tree decl_placeholder
9510 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9511 tree lab6 = NULL_TREE;
9512 if (cancellable)
9514 /* If this reduction needs destruction and parallel
9515 has been cancelled, jump around the merge operation
9516 to the destruction. */
9517 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9518 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9519 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9520 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9521 lab6, lab5);
9522 gimple_seq_add_stmt (end, g);
9523 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9525 SET_DECL_VALUE_EXPR (placeholder, out);
9526 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9527 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9528 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9529 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9530 gimple_seq_add_seq (end,
9531 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9532 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9533 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9535 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9536 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9538 if (cancellable)
9539 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9540 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9541 if (x)
9543 gimple_seq tseq = NULL;
9544 gimplify_stmt (&x, &tseq);
9545 gimple_seq_add_seq (end, tseq);
9548 else
9550 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9551 out = unshare_expr (out);
9552 gimplify_assign (out, x, end);
9554 gimple *g
9555 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9556 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9557 gimple_seq_add_stmt (end, g);
9558 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9559 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9560 gimple_seq_add_stmt (end, g);
9561 g = gimple_build_assign (i, PLUS_EXPR, i,
9562 build_int_cst (TREE_TYPE (i), 1));
9563 gimple_seq_add_stmt (end, g);
9564 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9565 gimple_seq_add_stmt (end, g);
9566 gimple_seq_add_stmt (end, gimple_build_label (endl));
9568 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9570 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9571 tree oldv = NULL_TREE;
9572 tree lab6 = NULL_TREE;
9573 if (cancellable)
9575 /* If this reduction needs destruction and parallel
9576 has been cancelled, jump around the merge operation
9577 to the destruction. */
9578 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9579 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9580 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9581 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9582 lab6, lab5);
9583 gimple_seq_add_stmt (end, g);
9584 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9586 if (omp_privatize_by_reference (decl)
9587 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9588 TREE_TYPE (ref)))
9589 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9590 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9591 tree refv = create_tmp_var (TREE_TYPE (ref));
9592 gimplify_assign (refv, ref, end);
9593 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9594 SET_DECL_VALUE_EXPR (placeholder, ref);
9595 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9596 tree d = maybe_lookup_decl (decl, ctx);
9597 gcc_assert (d);
9598 if (DECL_HAS_VALUE_EXPR_P (d))
9599 oldv = DECL_VALUE_EXPR (d);
9600 if (omp_privatize_by_reference (var))
9602 tree v = fold_convert (TREE_TYPE (d),
9603 build_fold_addr_expr (new_var));
9604 SET_DECL_VALUE_EXPR (d, v);
9606 else
9607 SET_DECL_VALUE_EXPR (d, new_var);
9608 DECL_HAS_VALUE_EXPR_P (d) = 1;
9609 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9610 if (oldv)
9611 SET_DECL_VALUE_EXPR (d, oldv);
9612 else
9614 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9615 DECL_HAS_VALUE_EXPR_P (d) = 0;
9617 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9618 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9619 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9620 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9621 if (cancellable)
9622 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9623 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9624 if (x)
9626 gimple_seq tseq = NULL;
9627 gimplify_stmt (&x, &tseq);
9628 gimple_seq_add_seq (end, tseq);
9631 else
9633 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9634 ref = unshare_expr (ref);
9635 gimplify_assign (ref, x, end);
9637 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9638 ++cnt;
9639 field = DECL_CHAIN (bfield);
9643 if (code == OMP_TASKGROUP)
9645 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9646 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9647 gimple_seq_add_stmt (start, g);
9649 else
9651 tree c;
9652 if (code == OMP_FOR)
9653 c = gimple_omp_for_clauses (ctx->stmt);
9654 else if (code == OMP_SECTIONS)
9655 c = gimple_omp_sections_clauses (ctx->stmt);
9656 else if (code == OMP_SCOPE)
9657 c = gimple_omp_scope_clauses (ctx->stmt);
9658 else
9659 c = gimple_omp_taskreg_clauses (ctx->stmt);
9660 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9661 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9662 build_fold_addr_expr (avar));
9663 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9666 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9667 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9668 size_one_node));
9669 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9670 gimple_seq_add_stmt (end, g);
9671 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9672 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9674 enum built_in_function bfn
9675 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9676 t = builtin_decl_explicit (bfn);
9677 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9678 tree arg;
9679 if (cancellable)
9681 arg = create_tmp_var (c_bool_type);
9682 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9683 cancellable));
9685 else
9686 arg = build_int_cst (c_bool_type, 0);
9687 g = gimple_build_call (t, 1, arg);
9689 else
9691 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9692 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9694 gimple_seq_add_stmt (end, g);
9695 if (lab7)
9696 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9697 t = build_constructor (atype, NULL);
9698 TREE_THIS_VOLATILE (t) = 1;
9699 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9702 /* Expand code for an OpenMP taskgroup directive. */
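/* A user-level sketch of the lowering (illustrative; the reduction handling
   is emitted by lower_omp_task_reductions above):
     #pragma omp taskgroup task_reduction (+:r)
       body;
   becomes roughly
     GOMP_taskgroup_start ();
     GOMP_taskgroup_reduction_register (&array);
     body;
     ... merge loop and GOMP_taskgroup_reduction_unregister (&array),
	 appended from DSEQ ...  */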
9704 static void
9705 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9707 gimple *stmt = gsi_stmt (*gsi_p);
9708 gcall *x;
9709 gbind *bind;
9710 gimple_seq dseq = NULL;
9711 tree block = make_node (BLOCK);
9713 bind = gimple_build_bind (NULL, NULL, block);
9714 gsi_replace (gsi_p, bind, true);
9715 gimple_bind_add_stmt (bind, stmt);
9717 push_gimplify_context ();
9719 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START), 0);
9721 gimple_bind_add_stmt (bind, x);
9723 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9724 gimple_omp_taskgroup_clauses (stmt),
9725 gimple_bind_body_ptr (bind), &dseq);
9727 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9728 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9729 gimple_omp_set_body (stmt, NULL);
9731 gimple_bind_add_seq (bind, dseq);
9733 pop_gimplify_context (bind);
9735 gimple_bind_append_vars (bind, ctx->block_vars);
9736 BLOCK_VARS (block) = ctx->block_vars;
9740 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9742 static void
9743 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9744 omp_context *ctx)
9746 struct omp_for_data fd;
9747 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9748 return;
9750 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9751 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9752 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9753 if (!fd.ordered)
9754 return;
9756 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9757 tree c = gimple_omp_ordered_clauses (ord_stmt);
9758 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
9759 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
9761 /* Merge depend clauses from multiple adjacent
9762 #pragma omp ordered depend(sink:...) constructs
9763 into one #pragma omp ordered depend(sink:...), so that
9764 we can optimize them together. */
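/* E.g. (illustrative) two adjacent standalone constructs
     #pragma omp ordered depend (sink: i-1, j)
     #pragma omp ordered depend (sink: i, j-1)
   are combined into a single
     #pragma omp ordered depend (sink: i-1, j) depend (sink: i, j-1)
   whose clauses the folding below can then canonicalize together.  */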
9765 gimple_stmt_iterator gsi = *gsi_p;
9766 gsi_next (&gsi);
9767 while (!gsi_end_p (gsi))
9769 gimple *stmt = gsi_stmt (gsi);
9770 if (is_gimple_debug (stmt)
9771 || gimple_code (stmt) == GIMPLE_NOP)
9773 gsi_next (&gsi);
9774 continue;
9776 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9777 break;
9778 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9779 c = gimple_omp_ordered_clauses (ord_stmt2);
9780 if (c == NULL_TREE
9781 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS
9782 || OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
9783 break;
9784 while (*list_p)
9785 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9786 *list_p = c;
9787 gsi_remove (&gsi, true);
9791 /* Canonicalize sink dependence clauses into one folded clause if
9792 possible.
9794 The basic algorithm is to create a sink vector whose first
9795 element is the GCD of all the first elements, and whose remaining
9796 elements are the minimum of the subsequent columns.
9798 We ignore dependence vectors whose first element is zero because
9799 such dependencies are known to be executed by the same thread.
9801 We take into account the direction of the loop, so a minimum
9802 becomes a maximum if the loop is iterating forwards. We also
9803 ignore sink clauses where the loop direction is unknown, or where
9804 the offsets are clearly invalid because they are not a multiple
9805 of the loop increment.
9807 For example:
9809 #pragma omp for ordered(2)
9810 for (i=0; i < N; ++i)
9811 for (j=0; j < M; ++j)
9813 #pragma omp ordered \
9814 depend(sink:i-8,j-2) \
9815 depend(sink:i,j-1) \ // Completely ignored because i+0.
9816 depend(sink:i-4,j-3) \
9817 depend(sink:i-6,j-4)
9818 #pragma omp ordered depend(source)
9821 Folded clause is:
9823 depend(sink:-gcd(8,4,6),-min(2,3,4))
9824 -or-
9825 depend(sink:-2,-2)
9828 /* FIXME: Computing GCD's where the first element is zero is
9829 non-trivial in the presence of collapsed loops. Do this later. */
9830 if (fd.collapse > 1)
9831 return;
9833 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9835 /* wide_int is not a POD so it must be default-constructed. */
9836 for (unsigned i = 0; i != 2 * len - 1; ++i)
9837 new (static_cast<void*>(folded_deps + i)) wide_int ();
9839 tree folded_dep = NULL_TREE;
9840 /* TRUE if the first dimension's offset is negative. */
9841 bool neg_offset_p = false;
9843 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9844 unsigned int i;
9845 while ((c = *list_p) != NULL)
9847 bool remove = false;
9849 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS);
9850 if (OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
9851 goto next_ordered_clause;
9853 tree vec;
9854 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9855 vec && TREE_CODE (vec) == TREE_LIST;
9856 vec = TREE_CHAIN (vec), ++i)
9858 gcc_assert (i < len);
9860 /* omp_extract_for_data has canonicalized the condition. */
9861 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9862 || fd.loops[i].cond_code == GT_EXPR);
9863 bool forward = fd.loops[i].cond_code == LT_EXPR;
9864 bool maybe_lexically_later = true;
9866 /* While the committee makes up its mind, bail if we have any
9867 non-constant steps. */
9868 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9869 goto lower_omp_ordered_ret;
9871 tree itype = TREE_TYPE (TREE_VALUE (vec));
9872 if (POINTER_TYPE_P (itype))
9873 itype = sizetype;
9874 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9875 TYPE_PRECISION (itype),
9876 TYPE_SIGN (itype));
9878 /* Ignore invalid offsets that are not multiples of the step. */
9879 if (!wi::multiple_of_p (wi::abs (offset),
9880 wi::abs (wi::to_wide (fd.loops[i].step)),
9881 UNSIGNED))
9883 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9884 "ignoring sink clause with offset that is not "
9885 "a multiple of the loop step");
9886 remove = true;
9887 goto next_ordered_clause;
9890 /* Calculate the first dimension. The first dimension of
9891 the folded dependency vector is the GCD of the first
9892 elements, while ignoring any first elements whose offset
9893 is 0. */
9894 if (i == 0)
9896 /* Ignore dependence vectors whose first dimension is 0. */
9897 if (offset == 0)
9899 remove = true;
9900 goto next_ordered_clause;
9902 else
9904 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9906 error_at (OMP_CLAUSE_LOCATION (c),
9907 "first offset must be in opposite direction "
9908 "of loop iterations");
9909 goto lower_omp_ordered_ret;
9911 if (forward)
9912 offset = -offset;
9913 neg_offset_p = forward;
9914 /* Initialize the first time around. */
9915 if (folded_dep == NULL_TREE)
9917 folded_dep = c;
9918 folded_deps[0] = offset;
9920 else
9921 folded_deps[0] = wi::gcd (folded_deps[0],
9922 offset, UNSIGNED);
9925 /* Calculate minimum for the remaining dimensions. */
9926 else
9928 folded_deps[len + i - 1] = offset;
9929 if (folded_dep == c)
9930 folded_deps[i] = offset;
9931 else if (maybe_lexically_later
9932 && !wi::eq_p (folded_deps[i], offset))
9934 if (forward ^ wi::gts_p (folded_deps[i], offset))
9936 unsigned int j;
9937 folded_dep = c;
9938 for (j = 1; j <= i; j++)
9939 folded_deps[j] = folded_deps[len + j - 1];
9941 else
9942 maybe_lexically_later = false;
9946 gcc_assert (i == len);
9948 remove = true;
9950 next_ordered_clause:
9951 if (remove)
9952 *list_p = OMP_CLAUSE_CHAIN (c);
9953 else
9954 list_p = &OMP_CLAUSE_CHAIN (c);
9957 if (folded_dep)
9959 if (neg_offset_p)
9960 folded_deps[0] = -folded_deps[0];
9962 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9963 if (POINTER_TYPE_P (itype))
9964 itype = sizetype;
9966 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9967 = wide_int_to_tree (itype, folded_deps[0]);
9968 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9969 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9972 lower_omp_ordered_ret:
9974 /* Ordered without clauses is #pragma omp ordered threads, while we want
9975 a nop instead if we remove all clauses. */
9976 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9977 gsi_replace (gsi_p, gimple_build_nop (), true);
9981 /* Expand code for an OpenMP ordered directive. */
9983 static void
9984 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9986 tree block;
9987 gimple *stmt = gsi_stmt (*gsi_p), *g;
9988 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9989 gcall *x;
9990 gbind *bind;
9991 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9992 OMP_CLAUSE_SIMD);
9993 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9994 loop. */
9995 bool maybe_simt
9996 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9997 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9998 OMP_CLAUSE_THREADS);
10000 if (gimple_omp_ordered_standalone_p (ord_stmt))
10002 /* FIXME: This needs to be moved to expansion, to verify various
10003 conditions that are only testable on a CFG with dominators computed, and
10004 also all the depend clauses to be merged might still need to be available
10005 for the runtime checks. */
10006 if (0)
10007 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
10008 return;
10011 push_gimplify_context ();
10013 block = make_node (BLOCK);
10014 bind = gimple_build_bind (NULL, NULL, block);
10015 gsi_replace (gsi_p, bind, true);
10016 gimple_bind_add_stmt (bind, stmt);
10018 if (simd)
10020 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
10021 build_int_cst (NULL_TREE, threads));
10022 cfun->has_simduid_loops = true;
10024 else
10025 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START), 0);
10027 gimple_bind_add_stmt (bind, x);
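/* For SIMT targets the body is executed by one lane at a time; a sketch of
   the loop built below (the labels are illustrative):
     counter = GOMP_SIMT_LANE ();
   body:
     if (GOMP_SIMT_ORDERED_PRED (counter) == 0)
       { ... the ordered body ... }
   test:
     counter = counter - 1;
     if (GOMP_SIMT_VOTE_ANY (counter >= 0)) goto body; else goto end;  */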
10029 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
10030 if (maybe_simt)
10032 counter = create_tmp_var (integer_type_node);
10033 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
10034 gimple_call_set_lhs (g, counter);
10035 gimple_bind_add_stmt (bind, g);
10037 body = create_artificial_label (UNKNOWN_LOCATION);
10038 test = create_artificial_label (UNKNOWN_LOCATION);
10039 gimple_bind_add_stmt (bind, gimple_build_label (body));
10041 tree simt_pred = create_tmp_var (integer_type_node);
10042 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
10043 gimple_call_set_lhs (g, simt_pred);
10044 gimple_bind_add_stmt (bind, g);
10046 tree t = create_artificial_label (UNKNOWN_LOCATION);
10047 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
10048 gimple_bind_add_stmt (bind, g);
10050 gimple_bind_add_stmt (bind, gimple_build_label (t));
10052 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10053 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10054 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10055 gimple_omp_set_body (stmt, NULL);
10057 if (maybe_simt)
10059 gimple_bind_add_stmt (bind, gimple_build_label (test));
10060 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
10061 gimple_bind_add_stmt (bind, g);
10063 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
10064 tree nonneg = create_tmp_var (integer_type_node);
10065 gimple_seq tseq = NULL;
10066 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
10067 gimple_bind_add_seq (bind, tseq);
10069 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
10070 gimple_call_set_lhs (g, nonneg);
10071 gimple_bind_add_stmt (bind, g);
10073 tree end = create_artificial_label (UNKNOWN_LOCATION);
10074 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
10075 gimple_bind_add_stmt (bind, g);
10077 gimple_bind_add_stmt (bind, gimple_build_label (end));
10079 if (simd)
10080 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
10081 build_int_cst (NULL_TREE, threads));
10082 else
10083 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END), 0);
10085 gimple_bind_add_stmt (bind, x);
10087 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10089 pop_gimplify_context (bind);
10091 gimple_bind_append_vars (bind, ctx->block_vars);
10092 BLOCK_VARS (block) = gimple_bind_vars (bind);
10096 /* Expand code for an OpenMP scan directive and the structured block
10097 before the scan directive. */
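/* For reference, the kind of user construct handled here looks like
   (illustrative):
     #pragma omp simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
	 r += a[i];			// input phase
	 #pragma omp scan inclusive (r)
	 use (r);			// scan phase
       }  */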
10099 static void
10100 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10102 gimple *stmt = gsi_stmt (*gsi_p);
10103 bool has_clauses
10104 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
10105 tree lane = NULL_TREE;
10106 gimple_seq before = NULL;
10107 omp_context *octx = ctx->outer;
10108 gcc_assert (octx);
10109 if (octx->scan_exclusive && !has_clauses)
10111 gimple_stmt_iterator gsi2 = *gsi_p;
10112 gsi_next (&gsi2);
10113 gimple *stmt2 = gsi_stmt (gsi2);
10114 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
10115 with the following GIMPLE_OMP_SCAN with clauses, so that input_phase,
10116 the one with the exclusive clause(s), comes first. */
10117 if (stmt2
10118 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
10119 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
10121 gsi_remove (gsi_p, false);
10122 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
10123 ctx = maybe_lookup_ctx (stmt2);
10124 gcc_assert (ctx);
10125 lower_omp_scan (gsi_p, ctx);
10126 return;
10130 bool input_phase = has_clauses ^ octx->scan_inclusive;
10131 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10132 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
10133 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10134 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
10135 && !gimple_omp_for_combined_p (octx->stmt));
10136 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
10137 if (is_for_simd && octx->for_simd_scan_phase)
10138 is_simd = false;
10139 if (is_simd)
10140 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
10141 OMP_CLAUSE__SIMDUID_))
10143 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
10144 lane = create_tmp_var (unsigned_type_node);
10145 tree t = build_int_cst (integer_type_node,
10146 input_phase ? 1
10147 : octx->scan_inclusive ? 2 : 3);
10148 gimple *g
10149 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
10150 gimple_call_set_lhs (g, lane);
10151 gimple_seq_add_stmt (&before, g);
10154 if (is_simd || is_for)
10156 for (tree c = gimple_omp_for_clauses (octx->stmt);
10157 c; c = OMP_CLAUSE_CHAIN (c))
10158 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10159 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10161 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10162 tree var = OMP_CLAUSE_DECL (c);
10163 tree new_var = lookup_decl (var, octx);
10164 tree val = new_var;
10165 tree var2 = NULL_TREE;
10166 tree var3 = NULL_TREE;
10167 tree var4 = NULL_TREE;
10168 tree lane0 = NULL_TREE;
10169 tree new_vard = new_var;
10170 if (omp_privatize_by_reference (var))
10172 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10173 val = new_var;
10175 if (DECL_HAS_VALUE_EXPR_P (new_vard))
10177 val = DECL_VALUE_EXPR (new_vard);
10178 if (new_vard != new_var)
10180 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
10181 val = TREE_OPERAND (val, 0);
10183 if (TREE_CODE (val) == ARRAY_REF
10184 && VAR_P (TREE_OPERAND (val, 0)))
10186 tree v = TREE_OPERAND (val, 0);
10187 if (lookup_attribute ("omp simd array",
10188 DECL_ATTRIBUTES (v)))
10190 val = unshare_expr (val);
10191 lane0 = TREE_OPERAND (val, 1);
10192 TREE_OPERAND (val, 1) = lane;
10193 var2 = lookup_decl (v, octx);
10194 if (octx->scan_exclusive)
10195 var4 = lookup_decl (var2, octx);
10196 if (input_phase
10197 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10198 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
10199 if (!input_phase)
10201 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
10202 var2, lane, NULL_TREE, NULL_TREE);
10203 TREE_THIS_NOTRAP (var2) = 1;
10204 if (octx->scan_exclusive)
10206 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
10207 var4, lane, NULL_TREE,
10208 NULL_TREE);
10209 TREE_THIS_NOTRAP (var4) = 1;
10212 else
10213 var2 = val;
10216 gcc_assert (var2);
10218 else
10220 var2 = build_outer_var_ref (var, octx);
10221 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10223 var3 = maybe_lookup_decl (new_vard, octx);
10224 if (var3 == new_vard || var3 == NULL_TREE)
10225 var3 = NULL_TREE;
10226 else if (is_simd && octx->scan_exclusive && !input_phase)
10228 var4 = maybe_lookup_decl (var3, octx);
10229 if (var4 == var3 || var4 == NULL_TREE)
10231 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
10233 var4 = var3;
10234 var3 = NULL_TREE;
10236 else
10237 var4 = NULL_TREE;
10241 if (is_simd
10242 && octx->scan_exclusive
10243 && !input_phase
10244 && var4 == NULL_TREE)
10245 var4 = create_tmp_var (TREE_TYPE (val));
10247 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10249 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10250 if (input_phase)
10252 if (var3)
10254 /* If we've added a separate identity element
10255 variable, copy it over into val. */
10256 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
10257 var3);
10258 gimplify_and_add (x, &before);
10260 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10262 /* Otherwise, assign to it the identity element. */
10263 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10264 if (is_for)
10265 tseq = copy_gimple_seq_and_replace_locals (tseq);
10266 tree ref = build_outer_var_ref (var, octx);
10267 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10268 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10269 if (x)
10271 if (new_vard != new_var)
10272 val = build_fold_addr_expr_loc (clause_loc, val);
10273 SET_DECL_VALUE_EXPR (new_vard, val);
10275 SET_DECL_VALUE_EXPR (placeholder, ref);
10276 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10277 lower_omp (&tseq, octx);
10278 if (x)
10279 SET_DECL_VALUE_EXPR (new_vard, x);
10280 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10281 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10282 gimple_seq_add_seq (&before, tseq);
10283 if (is_simd)
10284 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10287 else if (is_simd)
10289 tree x;
10290 if (octx->scan_exclusive)
10292 tree v4 = unshare_expr (var4);
10293 tree v2 = unshare_expr (var2);
10294 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
10295 gimplify_and_add (x, &before);
10297 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10298 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10299 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10300 tree vexpr = val;
10301 if (x && new_vard != new_var)
10302 vexpr = build_fold_addr_expr_loc (clause_loc, val);
10303 if (x)
10304 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10305 SET_DECL_VALUE_EXPR (placeholder, var2);
10306 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10307 lower_omp (&tseq, octx);
10308 gimple_seq_add_seq (&before, tseq);
10309 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10310 if (x)
10311 SET_DECL_VALUE_EXPR (new_vard, x);
10312 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10313 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10314 if (octx->scan_inclusive)
10316 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10317 var2);
10318 gimplify_and_add (x, &before);
10320 else if (lane0 == NULL_TREE)
10322 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10323 var4);
10324 gimplify_and_add (x, &before);
10328 else
10330 if (input_phase)
10332 /* input phase. Set val to initializer before
10333 the body. */
10334 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
10335 gimplify_assign (val, x, &before);
10337 else if (is_simd)
10339 /* scan phase. */
10340 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10341 if (code == MINUS_EXPR)
10342 code = PLUS_EXPR;
10344 tree x = build2 (code, TREE_TYPE (var2),
10345 unshare_expr (var2), unshare_expr (val));
10346 if (octx->scan_inclusive)
10348 gimplify_assign (unshare_expr (var2), x, &before);
10349 gimplify_assign (val, var2, &before);
10351 else
10353 gimplify_assign (unshare_expr (var4),
10354 unshare_expr (var2), &before);
10355 gimplify_assign (var2, x, &before);
10356 if (lane0 == NULL_TREE)
10357 gimplify_assign (val, var4, &before);
10361 if (octx->scan_exclusive && !input_phase && lane0)
10363 tree vexpr = unshare_expr (var4);
10364 TREE_OPERAND (vexpr, 1) = lane0;
10365 if (new_vard != new_var)
10366 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
10367 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10371 if (is_simd && !is_for_simd)
10373 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
10374 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
10375 gsi_replace (gsi_p, gimple_build_nop (), true);
10376 return;
10378 lower_omp (gimple_omp_body_ptr (stmt), octx);
10379 if (before)
10381 gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (stmt));
10382 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
10387 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10388 substitution of a couple of function calls. The NAMED case, however,
10389 requires that languages coordinate a symbol name. It is therefore
10390 best put here in common code. */
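/* A sketch of the effect, matching the calls built below:
     #pragma omp critical (foo)
       body;
   is lowered to roughly
     GOMP_critical_name_start (&.gomp_critical_user_foo);
     body;
     GOMP_critical_name_end (&.gomp_critical_user_foo);
   while the unnamed form uses GOMP_critical_start / GOMP_critical_end.  */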
10392 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
10394 static void
10395 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10397 tree block;
10398 tree name, lock, unlock;
10399 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10400 gbind *bind;
10401 location_t loc = gimple_location (stmt);
10402 gimple_seq tbody;
10404 name = gimple_omp_critical_name (stmt);
10405 if (name)
10407 tree decl;
10409 if (!critical_name_mutexes)
10410 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10412 tree *n = critical_name_mutexes->get (name);
10413 if (n == NULL)
10415 char *new_str;
10417 decl = create_tmp_var_raw (ptr_type_node);
10419 new_str = ACONCAT ((".gomp_critical_user_",
10420 IDENTIFIER_POINTER (name), NULL));
10421 DECL_NAME (decl) = get_identifier (new_str);
10422 TREE_PUBLIC (decl) = 1;
10423 TREE_STATIC (decl) = 1;
10424 DECL_COMMON (decl) = 1;
10425 DECL_ARTIFICIAL (decl) = 1;
10426 DECL_IGNORED_P (decl) = 1;
10428 varpool_node::finalize_decl (decl);
10430 critical_name_mutexes->put (name, decl);
10432 else
10433 decl = *n;
10435 /* If '#pragma omp critical' is inside an offloaded region or
10436 inside a function marked as offloadable, the symbol must be
10437 marked as offloadable too. */
10438 omp_context *octx;
10439 if (cgraph_node::get (current_function_decl)->offloadable)
10440 varpool_node::get_create (decl)->offloadable = 1;
10441 else
10442 for (octx = ctx->outer; octx; octx = octx->outer)
10443 if (is_gimple_omp_offloaded (octx->stmt))
10445 varpool_node::get_create (decl)->offloadable = 1;
10446 break;
10449 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10450 lock = build_call_expr_loc (loc, lock, 1,
10451 build_fold_addr_expr_loc (loc, decl));
10453 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10454 unlock = build_call_expr_loc (loc, unlock, 1,
10455 build_fold_addr_expr_loc (loc, decl));
10457 else
10459 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10460 lock = build_call_expr_loc (loc, lock, 0);
10462 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10463 unlock = build_call_expr_loc (loc, unlock, 0);
10466 push_gimplify_context ();
10468 block = make_node (BLOCK);
10469 bind = gimple_build_bind (NULL, NULL, block);
10470 gsi_replace (gsi_p, bind, true);
10471 gimple_bind_add_stmt (bind, stmt);
10473 tbody = gimple_bind_body (bind);
10474 gimplify_and_add (lock, &tbody);
10475 gimple_bind_set_body (bind, tbody);
10477 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10478 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10479 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10480 gimple_omp_set_body (stmt, NULL);
10482 tbody = gimple_bind_body (bind);
10483 gimplify_and_add (unlock, &tbody);
10484 gimple_bind_set_body (bind, tbody);
10486 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10488 pop_gimplify_context (bind);
10489 gimple_bind_append_vars (bind, ctx->block_vars);
10490 BLOCK_VARS (block) = gimple_bind_vars (bind);
10493 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10494 for a lastprivate clause. Given a loop control predicate of (V
10495 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10496 is appended to *DLIST, iterator initialization is appended to
10497 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10498 to be emitted in a critical section. */
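/* E.g. (illustrative) for
     #pragma omp for lastprivate (x)
     for (i = 0; i < n; i++)
       x = f (i);
   the lowered copy-out is guarded by the negated loop predicate, roughly
     if (i >= n) x_orig = x_priv;
   so only a thread that ran the sequentially last iteration writes back.  */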
10500 static void
10501 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10502 gimple_seq *dlist, gimple_seq *clist,
10503 struct omp_context *ctx)
10505 tree clauses, cond, vinit;
10506 enum tree_code cond_code;
10507 gimple_seq stmts;
10509 cond_code = fd->loop.cond_code;
10510 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10512 /* When possible, use a strict equality expression. This can let VRP-type
10513 optimizations deduce the value and remove a copy. */
10514 if (tree_fits_shwi_p (fd->loop.step))
10516 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10517 if (step == 1 || step == -1)
10518 cond_code = EQ_EXPR;
10521 tree n2 = fd->loop.n2;
10522 if (fd->collapse > 1
10523 && TREE_CODE (n2) != INTEGER_CST
10524 && gimple_omp_for_combined_into_p (fd->for_stmt))
10526 struct omp_context *taskreg_ctx = NULL;
10527 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10529 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10530 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10531 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10533 if (gimple_omp_for_combined_into_p (gfor))
10535 gcc_assert (ctx->outer->outer
10536 && is_parallel_ctx (ctx->outer->outer));
10537 taskreg_ctx = ctx->outer->outer;
10539 else
10541 struct omp_for_data outer_fd;
10542 omp_extract_for_data (gfor, &outer_fd, NULL);
10543 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10546 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10547 taskreg_ctx = ctx->outer->outer;
10549 else if (is_taskreg_ctx (ctx->outer))
10550 taskreg_ctx = ctx->outer;
10551 if (taskreg_ctx)
10553 int i;
10554 tree taskreg_clauses
10555 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10556 tree innerc = omp_find_clause (taskreg_clauses,
10557 OMP_CLAUSE__LOOPTEMP_);
10558 gcc_assert (innerc);
10559 int count = fd->collapse;
10560 if (fd->non_rect
10561 && fd->last_nonrect == fd->first_nonrect + 1)
10562 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10563 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10564 count += 4;
10565 for (i = 0; i < count; i++)
10567 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10568 OMP_CLAUSE__LOOPTEMP_);
10569 gcc_assert (innerc);
10571 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10572 OMP_CLAUSE__LOOPTEMP_);
10573 if (innerc)
10574 n2 = fold_convert (TREE_TYPE (n2),
10575 lookup_decl (OMP_CLAUSE_DECL (innerc),
10576 taskreg_ctx));
10579 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10581 clauses = gimple_omp_for_clauses (fd->for_stmt);
10582 stmts = NULL;
10583 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
10584 if (!gimple_seq_empty_p (stmts))
10586 gimple_seq_add_seq (&stmts, *dlist);
10587 *dlist = stmts;
10589 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10590 vinit = fd->loop.n1;
10591 if (cond_code == EQ_EXPR
10592 && tree_fits_shwi_p (fd->loop.n2)
10593 && ! integer_zerop (fd->loop.n2))
10594 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10595 else
10596 vinit = unshare_expr (vinit);
10598 /* Initialize the iterator variable, so that threads that don't execute
10599 any iterations don't execute the lastprivate clauses by accident. */
10600 gimplify_assign (fd->loop.v, vinit, body_p);
10604 /* OpenACC privatization.
10606 Or, in other words, *sharing* at the respective OpenACC level of
10607 parallelism.
10609 From a correctness perspective, a non-addressable variable can't be accessed
10610 outside the current thread, so it can go in a (faster than shared memory)
10611 register -- though that register may need to be broadcast in some
10612 circumstances. A variable can only meaningfully be "shared" across workers
10613 or vector lanes if its address is taken, e.g. by a call to an atomic
10614 builtin.
10616 From an optimisation perspective, the answer might be fuzzier: maybe
10617 sometimes, using shared memory directly would be faster than
10618 broadcasting. */
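/* For instance (illustrative only):
     #pragma acc parallel
     {
       int v = 0;	// never addressable: can stay in a register,
			// private to the current thread
       int w;
       foo (&w);	// address taken: must live in memory so a single
			// instance can be shared across workers or lanes
     }  */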
10620 static void
10621 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10622 const location_t loc, const tree c,
10623 const tree decl)
10625 const dump_user_location_t d_u_loc
10626 = dump_user_location_t::from_location_t (loc);
10627 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10628 #if __GNUC__ >= 10
10629 # pragma GCC diagnostic push
10630 # pragma GCC diagnostic ignored "-Wformat"
10631 #endif
10632 dump_printf_loc (l_dump_flags, d_u_loc,
10633 "variable %<%T%> ", decl);
10634 #if __GNUC__ >= 10
10635 # pragma GCC diagnostic pop
10636 #endif
10637 if (c)
10638 dump_printf (l_dump_flags,
10639 "in %qs clause ",
10640 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10641 else
10642 dump_printf (l_dump_flags,
10643 "declared in block ");
10646 static bool
10647 oacc_privatization_candidate_p (const location_t loc, const tree c,
10648 const tree decl)
10650 dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10652 /* There is some differentiation depending on block vs. clause. */
10653 bool block = !c;
10655 bool res = true;
10657 if (res && !VAR_P (decl))
10659 /* A PARM_DECL (appearing in a 'private' clause) is expected to have been
10660 privatized into a new VAR_DECL. */
10661 gcc_checking_assert (TREE_CODE (decl) != PARM_DECL);
10663 res = false;
10665 if (dump_enabled_p ())
10667 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10668 dump_printf (l_dump_flags,
10669 "potentially has improper OpenACC privatization level: %qs\n",
10670 get_tree_code_name (TREE_CODE (decl)));
10674 if (res && block && TREE_STATIC (decl))
10676 res = false;
10678 if (dump_enabled_p ())
10680 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10681 dump_printf (l_dump_flags,
10682 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10683 "static");
10687 if (res && block && DECL_EXTERNAL (decl))
10689 res = false;
10691 if (dump_enabled_p ())
10693 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10694 dump_printf (l_dump_flags,
10695 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10696 "external");
10700 if (res && !TREE_ADDRESSABLE (decl))
10702 res = false;
10704 if (dump_enabled_p ())
10706 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10707 dump_printf (l_dump_flags,
10708 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10709 "not addressable");
10713 /* If an artificial variable has been added to a bind, e.g.
10714 a compiler-generated temporary structure used by the Fortran front-end, do
10715 not consider it as a privatization candidate. Note that variables on
10716 the stack are private per-thread by default: making them "gang-private"
10717 for OpenACC actually means to share a single instance of a variable
10718 amongst all workers and threads spawned within each gang.
10719 At present, no compiler-generated artificial variables require such
10720 sharing semantics, so this is safe. */
10722 if (res && block && DECL_ARTIFICIAL (decl))
10724 res = false;
10726 if (dump_enabled_p ())
10728 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10729 dump_printf (l_dump_flags,
10730 "isn%'t candidate for adjusting OpenACC privatization "
10731 "level: %s\n", "artificial");
10735 if (res)
10737 if (dump_enabled_p ())
10739 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10740 dump_printf (l_dump_flags,
10741 "is candidate for adjusting OpenACC privatization level\n");
10745 if (dump_file && (dump_flags & TDF_DETAILS))
10747 print_generic_decl (dump_file, decl, dump_flags);
10748 fprintf (dump_file, "\n");
10751 return res;
10754 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10755 CTX. */
10757 static void
10758 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10760 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10761 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10763 tree decl = OMP_CLAUSE_DECL (c);
10765 tree new_decl = lookup_decl (decl, ctx);
10767 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c,
10768 new_decl))
10769 continue;
10771 gcc_checking_assert
10772 (!ctx->oacc_privatization_candidates.contains (new_decl));
10773 ctx->oacc_privatization_candidates.safe_push (new_decl);
10777 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10778 CTX. */
10780 static void
10781 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10783 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10785 tree new_decl = lookup_decl (decl, ctx);
10786 gcc_checking_assert (new_decl == decl);
10788 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL,
10789 new_decl))
10790 continue;
10792 gcc_checking_assert
10793 (!ctx->oacc_privatization_candidates.contains (new_decl));
10794 ctx->oacc_privatization_candidates.safe_push (new_decl);
10798 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10800 static tree
10801 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10802 struct walk_stmt_info *wi)
10804 gimple *stmt = gsi_stmt (*gsi_p);
10806 *handled_ops_p = true;
10807 switch (gimple_code (stmt))
10809 WALK_SUBSTMTS;
10811 case GIMPLE_OMP_FOR:
10812 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10813 && gimple_omp_for_combined_into_p (stmt))
10814 *handled_ops_p = false;
10815 break;
10817 case GIMPLE_OMP_SCAN:
10818 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10819 return integer_zero_node;
10820 default:
10821 break;
10823 return NULL;
10826 /* Helper function for lower_omp_for, add transformations for a worksharing
10827 loop with scan directives inside of it.
10828 For a worksharing loop not combined with simd, transform:
10829 #pragma omp for reduction(inscan,+:r) private(i)
10830 for (i = 0; i < n; i = i + 1)
10833 update (r);
10835 #pragma omp scan inclusive(r)
10837 use (r);
10841 into two worksharing loops + code to merge results:
10843 num_threads = omp_get_num_threads ();
10844 thread_num = omp_get_thread_num ();
10845 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10846 <D.2099>:
10847 var2 = r;
10848 goto <D.2101>;
10849 <D.2100>:
10850 // For UDRs this is UDR init, or if ctors are needed, copy from
10851 // var3 that has been constructed to contain the neutral element.
10852 var2 = 0;
10853 <D.2101>:
10854 ivar = 0;
10855 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10856 // a shared array with num_threads elements and rprivb to a local array
10857 // with a number of elements equal to the number of (contiguous) iterations
10858 // the current thread will perform. controlb and controlp variables are
10859 // temporaries to handle deallocation of rprivb at the end of the second
10860 // GOMP_FOR.
10861 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10862 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10863 for (i = 0; i < n; i = i + 1)
10866 // For UDRs this is UDR init or copy from var3.
10867 r = 0;
10868 // This is the input phase from user code.
10869 update (r);
10872 // For UDRs this is UDR merge.
10873 var2 = var2 + r;
10874 // Rather than handing it over to the user, save to the local thread's
10875 // array.
10876 rprivb[ivar] = var2;
10877 // For exclusive scan, the above two statements are swapped.
10878 ivar = ivar + 1;
10881 // And remember this thread's final value in the shared
10882 // rpriva array.
10883 rpriva[(sizetype) thread_num] = var2;
10884 // If more than one thread, compute the inclusive parallel scan of the
10885 // rpriva array using a work-efficient prefix sum.
10886 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10887 <D.2102>:
10888 GOMP_barrier ();
10889 down = 0;
10890 k = 1;
10891 num_threadsu = (unsigned int) num_threads;
10892 thread_numup1 = (unsigned int) thread_num + 1;
10893 <D.2108>:
10894 twok = k << 1;
10895 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10896 <D.2110>:
10897 down = 4294967295;
10898 k = k >> 1;
10899 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10900 <D.2112>:
10901 k = k >> 1;
10902 <D.2111>:
10903 twok = k << 1;
10904 cplx = .MUL_OVERFLOW (thread_numup1, twok);
10905 mul = REALPART_EXPR <cplx>;
10906 ovf = IMAGPART_EXPR <cplx>;
10907 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10908 <D.2116>:
10909 andv = k & down;
10910 andvm1 = andv + 4294967295;
10911 l = mul + andvm1;
10912 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10913 <D.2120>:
10914 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10915 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10916 rpriva[l] = rpriva[l - k] + rpriva[l];
10917 <D.2117>:
10918 if (down == 0) goto <D.2121>; else goto <D.2122>;
10919 <D.2121>:
10920 k = k << 1;
10921 goto <D.2123>;
10922 <D.2122>:
10923 k = k >> 1;
10924 <D.2123>:
10925 GOMP_barrier ();
10926 if (k != 0) goto <D.2108>; else goto <D.2103>;
10927 <D.2103>:
10928 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10929 <D.2124>:
10930 // For UDRs this is UDR init or copy from var3.
10931 var2 = 0;
10932 goto <D.2126>;
10933 <D.2125>:
10934 var2 = rpriva[thread_num - 1];
10935 <D.2126>:
10936 ivar = 0;
10937 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10938 reduction(inscan,+:r) private(i)
10939 for (i = 0; i < n; i = i + 1)
10942 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10943 r = var2 + rprivb[ivar];
10946 // This is the scan phase from user code.
10947 use (r);
10948 // Plus a bump of the iterator.
10949 ivar = ivar + 1;
10951 } */
10953 static void
10954 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10955 struct omp_for_data *fd, omp_context *ctx)
10957 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10958 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10960 gimple_seq body = gimple_omp_body (stmt);
10961 gimple_stmt_iterator input1_gsi = gsi_none ();
10962 struct walk_stmt_info wi;
10963 memset (&wi, 0, sizeof (wi));
10964 wi.val_only = true;
10965 wi.info = (void *) &input1_gsi;
10966 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10967 gcc_assert (!gsi_end_p (input1_gsi));
10969 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10970 gimple_stmt_iterator gsi = input1_gsi;
10971 gsi_next (&gsi);
10972 gimple_stmt_iterator scan1_gsi = gsi;
10973 gimple *scan_stmt1 = gsi_stmt (gsi);
10974 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10976 gimple_seq input_body = gimple_omp_body (input_stmt1);
10977 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10978 gimple_omp_set_body (input_stmt1, NULL);
10979 gimple_omp_set_body (scan_stmt1, NULL);
10980 gimple_omp_set_body (stmt, NULL);
10982 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10983 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10984 gimple_omp_set_body (stmt, body);
10985 gimple_omp_set_body (input_stmt1, input_body);
10987 gimple_stmt_iterator input2_gsi = gsi_none ();
10988 memset (&wi, 0, sizeof (wi));
10989 wi.val_only = true;
10990 wi.info = (void *) &input2_gsi;
10991 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10992 gcc_assert (!gsi_end_p (input2_gsi));
10994 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10995 gsi = input2_gsi;
10996 gsi_next (&gsi);
10997 gimple_stmt_iterator scan2_gsi = gsi;
10998 gimple *scan_stmt2 = gsi_stmt (gsi);
10999 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
11000 gimple_omp_set_body (scan_stmt2, scan_body);
11002 gimple_stmt_iterator input3_gsi = gsi_none ();
11003 gimple_stmt_iterator scan3_gsi = gsi_none ();
11004 gimple_stmt_iterator input4_gsi = gsi_none ();
11005 gimple_stmt_iterator scan4_gsi = gsi_none ();
11006 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
11007 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
11008 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
11009 if (is_for_simd)
11011 memset (&wi, 0, sizeof (wi));
11012 wi.val_only = true;
11013 wi.info = (void *) &input3_gsi;
11014 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
11015 gcc_assert (!gsi_end_p (input3_gsi));
11017 input_stmt3 = gsi_stmt (input3_gsi);
11018 gsi = input3_gsi;
11019 gsi_next (&gsi);
11020 scan3_gsi = gsi;
11021 scan_stmt3 = gsi_stmt (gsi);
11022 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
11024 memset (&wi, 0, sizeof (wi));
11025 wi.val_only = true;
11026 wi.info = (void *) &input4_gsi;
11027 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
11028 gcc_assert (!gsi_end_p (input4_gsi));
11030 input_stmt4 = gsi_stmt (input4_gsi);
11031 gsi = input4_gsi;
11032 gsi_next (&gsi);
11033 scan4_gsi = gsi;
11034 scan_stmt4 = gsi_stmt (gsi);
11035 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
11037 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
11038 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
11041 tree num_threads = create_tmp_var (integer_type_node);
11042 tree thread_num = create_tmp_var (integer_type_node);
11043 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
11044 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
11045 gimple *g = gimple_build_call (nthreads_decl, 0);
11046 gimple_call_set_lhs (g, num_threads);
11047 gimple_seq_add_stmt (body_p, g);
11048 g = gimple_build_call (threadnum_decl, 0);
11049 gimple_call_set_lhs (g, thread_num);
11050 gimple_seq_add_stmt (body_p, g);
11052 tree ivar = create_tmp_var (sizetype);
11053 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
11054 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
11055 tree k = create_tmp_var (unsigned_type_node);
11056 tree l = create_tmp_var (unsigned_type_node);
11058 gimple_seq clist = NULL, mdlist = NULL;
11059 gimple_seq thr01_list = NULL, thrn1_list = NULL;
11060 gimple_seq thr02_list = NULL, thrn2_list = NULL;
11061 gimple_seq scan1_list = NULL, input2_list = NULL;
11062 gimple_seq last_list = NULL, reduc_list = NULL;
11063 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11064 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11065 && OMP_CLAUSE_REDUCTION_INSCAN (c))
11067 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11068 tree var = OMP_CLAUSE_DECL (c);
11069 tree new_var = lookup_decl (var, ctx);
11070 tree var3 = NULL_TREE;
11071 tree new_vard = new_var;
11072 if (omp_privatize_by_reference (var))
11073 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
11074 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11076 var3 = maybe_lookup_decl (new_vard, ctx);
11077 if (var3 == new_vard)
11078 var3 = NULL_TREE;
11081 tree ptype = build_pointer_type (TREE_TYPE (new_var));
11082 tree rpriva = create_tmp_var (ptype);
11083 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
11084 OMP_CLAUSE_DECL (nc) = rpriva;
11085 *cp1 = nc;
11086 cp1 = &OMP_CLAUSE_CHAIN (nc);
11088 tree rprivb = create_tmp_var (ptype);
11089 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
11090 OMP_CLAUSE_DECL (nc) = rprivb;
11091 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
11092 *cp1 = nc;
11093 cp1 = &OMP_CLAUSE_CHAIN (nc);
11095 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
11096 if (new_vard != new_var)
11097 TREE_ADDRESSABLE (var2) = 1;
11098 gimple_add_tmp_var (var2);
11100 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
11101 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11102 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11103 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11104 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
11106 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
11107 thread_num, integer_minus_one_node);
11108 x = fold_convert_loc (clause_loc, sizetype, x);
11109 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11110 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11111 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11112 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
11114 x = fold_convert_loc (clause_loc, sizetype, l);
11115 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11116 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11117 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11118 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
11120 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
11121 x = fold_convert_loc (clause_loc, sizetype, x);
11122 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11123 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11124 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11125 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
11127 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
11128 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11129 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
11130 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
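	/* In array terms (descriptive note): rpriva_ref is
	   rpriva[thread_num], rprivam1_ref is rpriva[thread_num - 1],
	   rprival_ref is rpriva[l], rprivalmk_ref is rpriva[l - k] and
	   rprivb_ref is rprivb[ivar], each spelled out above as
	   *(base + index * sizeof (element)).  */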
11132 tree var4 = is_for_simd ? new_var : var2;
11133 tree var5 = NULL_TREE, var6 = NULL_TREE;
11134 if (is_for_simd)
11136 var5 = lookup_decl (var, input_simd_ctx);
11137 var6 = lookup_decl (var, scan_simd_ctx);
11138 if (new_vard != new_var)
11140 var5 = build_simple_mem_ref_loc (clause_loc, var5);
11141 var6 = build_simple_mem_ref_loc (clause_loc, var6);
11144 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11146 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
11147 tree val = var2;
11149 x = lang_hooks.decls.omp_clause_default_ctor
11150 (c, var2, build_outer_var_ref (var, ctx));
11151 if (x)
11152 gimplify_and_add (x, &clist);
11154 x = build_outer_var_ref (var, ctx);
11155 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
11157 gimplify_and_add (x, &thr01_list);
11159 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
11160 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
11161 if (var3)
11163 x = unshare_expr (var4);
11164 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11165 gimplify_and_add (x, &thrn1_list);
11166 x = unshare_expr (var4);
11167 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11168 gimplify_and_add (x, &thr02_list);
11170 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
11172 /* Otherwise, assign to it the identity element. */
11173 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11174 tseq = copy_gimple_seq_and_replace_locals (tseq);
11175 if (!is_for_simd)
11177 if (new_vard != new_var)
11178 val = build_fold_addr_expr_loc (clause_loc, val);
11179 SET_DECL_VALUE_EXPR (new_vard, val);
11180 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11182 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
11183 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11184 lower_omp (&tseq, ctx);
11185 gimple_seq_add_seq (&thrn1_list, tseq);
11186 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11187 lower_omp (&tseq, ctx);
11188 gimple_seq_add_seq (&thr02_list, tseq);
11189 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11190 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11191 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11192 if (y)
11193 SET_DECL_VALUE_EXPR (new_vard, y);
11194 else
11196 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11197 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11201 x = unshare_expr (var4);
11202 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
11203 gimplify_and_add (x, &thrn2_list);
11205 if (is_for_simd)
11207 x = unshare_expr (rprivb_ref);
11208 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
11209 gimplify_and_add (x, &scan1_list);
11211 else
11213 if (ctx->scan_exclusive)
11215 x = unshare_expr (rprivb_ref);
11216 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11217 gimplify_and_add (x, &scan1_list);
11220 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11221 tseq = copy_gimple_seq_and_replace_locals (tseq);
11222 SET_DECL_VALUE_EXPR (placeholder, var2);
11223 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11224 lower_omp (&tseq, ctx);
11225 gimple_seq_add_seq (&scan1_list, tseq);
11227 if (ctx->scan_inclusive)
11229 x = unshare_expr (rprivb_ref);
11230 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11231 gimplify_and_add (x, &scan1_list);
11235 x = unshare_expr (rpriva_ref);
11236 x = lang_hooks.decls.omp_clause_assign_op (c, x,
11237 unshare_expr (var4));
11238 gimplify_and_add (x, &mdlist);
11240 x = unshare_expr (is_for_simd ? var6 : new_var);
11241 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
11242 gimplify_and_add (x, &input2_list);
11244 val = rprivb_ref;
11245 if (new_vard != new_var)
11246 val = build_fold_addr_expr_loc (clause_loc, val);
11248 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11249 tseq = copy_gimple_seq_and_replace_locals (tseq);
11250 SET_DECL_VALUE_EXPR (new_vard, val);
11251 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11252 if (is_for_simd)
11254 SET_DECL_VALUE_EXPR (placeholder, var6);
11255 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11257 else
11258 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11259 lower_omp (&tseq, ctx);
11260 if (y)
11261 SET_DECL_VALUE_EXPR (new_vard, y);
11262 else
11264 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11265 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11267 if (!is_for_simd)
11269 SET_DECL_VALUE_EXPR (placeholder, new_var);
11270 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11271 lower_omp (&tseq, ctx);
11273 gimple_seq_add_seq (&input2_list, tseq);
11275 x = build_outer_var_ref (var, ctx);
11276 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
11277 gimplify_and_add (x, &last_list);
11279 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
11280 gimplify_and_add (x, &reduc_list);
11281 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11282 tseq = copy_gimple_seq_and_replace_locals (tseq);
11283 val = rprival_ref;
11284 if (new_vard != new_var)
11285 val = build_fold_addr_expr_loc (clause_loc, val);
11286 SET_DECL_VALUE_EXPR (new_vard, val);
11287 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11288 SET_DECL_VALUE_EXPR (placeholder, var2);
11289 lower_omp (&tseq, ctx);
11290 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11291 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11292 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11293 if (y)
11294 SET_DECL_VALUE_EXPR (new_vard, y);
11295 else
11297 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11298 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11300 gimple_seq_add_seq (&reduc_list, tseq);
11301 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
11302 gimplify_and_add (x, &reduc_list);
11304 x = lang_hooks.decls.omp_clause_dtor (c, var2);
11305 if (x)
11306 gimplify_and_add (x, dlist);
11308 else
11310 x = build_outer_var_ref (var, ctx);
11311 gimplify_assign (unshare_expr (var4), x, &thr01_list);
11313 x = omp_reduction_init (c, TREE_TYPE (new_var));
11314 gimplify_assign (unshare_expr (var4), unshare_expr (x),
11315 &thrn1_list);
11316 gimplify_assign (unshare_expr (var4), x, &thr02_list);
11318 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
11320 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
11321 if (code == MINUS_EXPR)
11322 code = PLUS_EXPR;
11324 if (is_for_simd)
11325 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
11326 else
11328 if (ctx->scan_exclusive)
11329 gimplify_assign (unshare_expr (rprivb_ref), var2,
11330 &scan1_list);
11331 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
11332 gimplify_assign (var2, x, &scan1_list);
11333 if (ctx->scan_inclusive)
11334 gimplify_assign (unshare_expr (rprivb_ref), var2,
11335 &scan1_list);
11338 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
11339 &mdlist);
11341 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
11342 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
11344 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
11345 &last_list);
11347 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
11348 unshare_expr (rprival_ref));
11349 gimplify_assign (rprival_ref, x, &reduc_list);
11353 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11354 gimple_seq_add_stmt (&scan1_list, g);
11355 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11356 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11357 ? scan_stmt4 : scan_stmt2), g);
11359 tree controlb = create_tmp_var (boolean_type_node);
11360 tree controlp = create_tmp_var (ptr_type_node);
11361 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11362 OMP_CLAUSE_DECL (nc) = controlb;
11363 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11364 *cp1 = nc;
11365 cp1 = &OMP_CLAUSE_CHAIN (nc);
11366 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11367 OMP_CLAUSE_DECL (nc) = controlp;
11368 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11369 *cp1 = nc;
11370 cp1 = &OMP_CLAUSE_CHAIN (nc);
11371 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11372 OMP_CLAUSE_DECL (nc) = controlb;
11373 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11374 *cp2 = nc;
11375 cp2 = &OMP_CLAUSE_CHAIN (nc);
11376 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11377 OMP_CLAUSE_DECL (nc) = controlp;
11378 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11379 *cp2 = nc;
11380 cp2 = &OMP_CLAUSE_CHAIN (nc);
11382 *cp1 = gimple_omp_for_clauses (stmt);
11383 gimple_omp_for_set_clauses (stmt, new_clauses1);
11384 *cp2 = gimple_omp_for_clauses (new_stmt);
11385 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
11387 if (is_for_simd)
11389 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
11390 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
11392 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
11393 GSI_SAME_STMT);
11394 gsi_remove (&input3_gsi, true);
11395 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11396 GSI_SAME_STMT);
11397 gsi_remove (&scan3_gsi, true);
11398 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11399 GSI_SAME_STMT);
11400 gsi_remove (&input4_gsi, true);
11401 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11402 GSI_SAME_STMT);
11403 gsi_remove (&scan4_gsi, true);
11405 else
11407 gimple_omp_set_body (scan_stmt1, scan1_list);
11408 gimple_omp_set_body (input_stmt2, input2_list);
11411 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11412 GSI_SAME_STMT);
11413 gsi_remove (&input1_gsi, true);
11414 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11415 GSI_SAME_STMT);
11416 gsi_remove (&scan1_gsi, true);
11417 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11418 GSI_SAME_STMT);
11419 gsi_remove (&input2_gsi, true);
11420 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11421 GSI_SAME_STMT);
11422 gsi_remove (&scan2_gsi, true);
11424 gimple_seq_add_seq (body_p, clist);
11426 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11427 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11428 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11429 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11430 gimple_seq_add_stmt (body_p, g);
11431 g = gimple_build_label (lab1);
11432 gimple_seq_add_stmt (body_p, g);
11433 gimple_seq_add_seq (body_p, thr01_list);
11434 g = gimple_build_goto (lab3);
11435 gimple_seq_add_stmt (body_p, g);
11436 g = gimple_build_label (lab2);
11437 gimple_seq_add_stmt (body_p, g);
11438 gimple_seq_add_seq (body_p, thrn1_list);
11439 g = gimple_build_label (lab3);
11440 gimple_seq_add_stmt (body_p, g);
11442 g = gimple_build_assign (ivar, size_zero_node);
11443 gimple_seq_add_stmt (body_p, g);
11445 gimple_seq_add_stmt (body_p, stmt);
11446 gimple_seq_add_seq (body_p, body);
11447 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11448 fd->loop.v));
11450 g = gimple_build_omp_return (true);
11451 gimple_seq_add_stmt (body_p, g);
11452 gimple_seq_add_seq (body_p, mdlist);
11454 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11455 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11456 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11457 gimple_seq_add_stmt (body_p, g);
11458 g = gimple_build_label (lab1);
11459 gimple_seq_add_stmt (body_p, g);
11461 g = omp_build_barrier (NULL);
11462 gimple_seq_add_stmt (body_p, g);
11464 tree down = create_tmp_var (unsigned_type_node);
11465 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11466 gimple_seq_add_stmt (body_p, g);
11468 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11469 gimple_seq_add_stmt (body_p, g);
11471 tree num_threadsu = create_tmp_var (unsigned_type_node);
11472 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11473 gimple_seq_add_stmt (body_p, g);
11475 tree thread_numu = create_tmp_var (unsigned_type_node);
11476 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11477 gimple_seq_add_stmt (body_p, g);
11479 tree thread_nump1 = create_tmp_var (unsigned_type_node);
11480 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11481 build_int_cst (unsigned_type_node, 1));
11482 gimple_seq_add_stmt (body_p, g);
11484 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11485 g = gimple_build_label (lab3);
11486 gimple_seq_add_stmt (body_p, g);
11488 tree twok = create_tmp_var (unsigned_type_node);
11489 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11490 gimple_seq_add_stmt (body_p, g);
11492 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11493 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11494 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11495 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11496 gimple_seq_add_stmt (body_p, g);
11497 g = gimple_build_label (lab4);
11498 gimple_seq_add_stmt (body_p, g);
11499 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11500 gimple_seq_add_stmt (body_p, g);
11501 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11502 gimple_seq_add_stmt (body_p, g);
11504 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11505 gimple_seq_add_stmt (body_p, g);
11506 g = gimple_build_label (lab6);
11507 gimple_seq_add_stmt (body_p, g);
11509 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11510 gimple_seq_add_stmt (body_p, g);
11512 g = gimple_build_label (lab5);
11513 gimple_seq_add_stmt (body_p, g);
11515 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11516 gimple_seq_add_stmt (body_p, g);
11518 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11519 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11520 gimple_call_set_lhs (g, cplx);
11521 gimple_seq_add_stmt (body_p, g);
11522 tree mul = create_tmp_var (unsigned_type_node);
11523 g = gimple_build_assign (mul, REALPART_EXPR,
11524 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11525 gimple_seq_add_stmt (body_p, g);
11526 tree ovf = create_tmp_var (unsigned_type_node);
11527 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11528 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11529 gimple_seq_add_stmt (body_p, g);
11531 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11532 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11533 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11534 lab7, lab8);
11535 gimple_seq_add_stmt (body_p, g);
11536 g = gimple_build_label (lab7);
11537 gimple_seq_add_stmt (body_p, g);
11539 tree andv = create_tmp_var (unsigned_type_node);
11540 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11541 gimple_seq_add_stmt (body_p, g);
11542 tree andvm1 = create_tmp_var (unsigned_type_node);
11543 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11544 build_minus_one_cst (unsigned_type_node));
11545 gimple_seq_add_stmt (body_p, g);
11547 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11548 gimple_seq_add_stmt (body_p, g);
11550 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11551 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11552 gimple_seq_add_stmt (body_p, g);
11553 g = gimple_build_label (lab9);
11554 gimple_seq_add_stmt (body_p, g);
11555 gimple_seq_add_seq (body_p, reduc_list);
11556 g = gimple_build_label (lab8);
11557 gimple_seq_add_stmt (body_p, g);
11559 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11560 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11561 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11562 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11563 lab10, lab11);
11564 gimple_seq_add_stmt (body_p, g);
11565 g = gimple_build_label (lab10);
11566 gimple_seq_add_stmt (body_p, g);
11567 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11568 gimple_seq_add_stmt (body_p, g);
11569 g = gimple_build_goto (lab12);
11570 gimple_seq_add_stmt (body_p, g);
11571 g = gimple_build_label (lab11);
11572 gimple_seq_add_stmt (body_p, g);
11573 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11574 gimple_seq_add_stmt (body_p, g);
11575 g = gimple_build_label (lab12);
11576 gimple_seq_add_stmt (body_p, g);
11578 g = omp_build_barrier (NULL);
11579 gimple_seq_add_stmt (body_p, g);
11581 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11582 lab3, lab2);
11583 gimple_seq_add_stmt (body_p, g);
11585 g = gimple_build_label (lab2);
11586 gimple_seq_add_stmt (body_p, g);
11588 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11589 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11590 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11591 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11592 gimple_seq_add_stmt (body_p, g);
11593 g = gimple_build_label (lab1);
11594 gimple_seq_add_stmt (body_p, g);
11595 gimple_seq_add_seq (body_p, thr02_list);
11596 g = gimple_build_goto (lab3);
11597 gimple_seq_add_stmt (body_p, g);
11598 g = gimple_build_label (lab2);
11599 gimple_seq_add_stmt (body_p, g);
11600 gimple_seq_add_seq (body_p, thrn2_list);
11601 g = gimple_build_label (lab3);
11602 gimple_seq_add_stmt (body_p, g);
11604 g = gimple_build_assign (ivar, size_zero_node);
11605 gimple_seq_add_stmt (body_p, g);
11606 gimple_seq_add_stmt (body_p, new_stmt);
11607 gimple_seq_add_seq (body_p, new_body);
11609 gimple_seq new_dlist = NULL;
11610 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11611 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11612 tree num_threadsm1 = create_tmp_var (integer_type_node);
11613 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11614 integer_minus_one_node);
11615 gimple_seq_add_stmt (&new_dlist, g);
11616 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11617 gimple_seq_add_stmt (&new_dlist, g);
11618 g = gimple_build_label (lab1);
11619 gimple_seq_add_stmt (&new_dlist, g);
11620 gimple_seq_add_seq (&new_dlist, last_list);
11621 g = gimple_build_label (lab2);
11622 gimple_seq_add_stmt (&new_dlist, g);
11623 gimple_seq_add_seq (&new_dlist, *dlist);
11624 *dlist = new_dlist;
11627 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11628 the addresses of variables to be made private at the surrounding
11629 parallelism level. Such functions appear in the gimple code stream in two
11630 forms, e.g. for a partitioned loop:
11632 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11633 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11634 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11635 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11637 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11638 not as part of a HEAD_MARK sequence:
11640 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11642 For such stand-alone appearances, the 3rd argument is always 0, denoting
11643 gang partitioning. */
11645 static gcall *
11646 lower_oacc_private_marker (omp_context *ctx)
11648 if (ctx->oacc_privatization_candidates.length () == 0)
11649 return NULL;
11651 auto_vec<tree, 5> args;
11653 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11654 args.quick_push (integer_zero_node);
11655 args.quick_push (integer_minus_one_node);
11657 int i;
11658 tree decl;
11659 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11661 gcc_checking_assert (TREE_ADDRESSABLE (decl));
11662 tree addr = build_fold_addr_expr (decl);
11663 args.safe_push (addr);
11666 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
11669 /* Lower code for an OMP loop directive. */
11671 static void
11672 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11674 tree *rhs_p, block;
11675 struct omp_for_data fd, *fdp = NULL;
11676 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
11677 gbind *new_stmt;
11678 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
11679 gimple_seq cnt_list = NULL, clist = NULL;
11680 gimple_seq oacc_head = NULL, oacc_tail = NULL;
11681 size_t i;
11683 push_gimplify_context ();
11685 if (is_gimple_omp_oacc (ctx->stmt))
11686 oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));
11688 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
11690 block = make_node (BLOCK);
11691 new_stmt = gimple_build_bind (NULL, NULL, block);
11692   /* Replace at gsi right away, so that 'stmt' is no longer a member
11693      of a sequence, as we're going to add it to a different
11694      one below.  */
11695 gsi_replace (gsi_p, new_stmt, true);
11697 /* Move declaration of temporaries in the loop body before we make
11698 it go away. */
11699 omp_for_body = gimple_omp_body (stmt);
11700 if (!gimple_seq_empty_p (omp_for_body)
11701 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
11703 gbind *inner_bind
11704 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
11705 tree vars = gimple_bind_vars (inner_bind);
11706 if (is_gimple_omp_oacc (ctx->stmt))
11707 oacc_privatization_scan_decl_chain (ctx, vars);
11708 gimple_bind_append_vars (new_stmt, vars);
11709 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11710 	 keep them on the inner_bind and its block.  */
11711 gimple_bind_set_vars (inner_bind, NULL_TREE);
11712 if (gimple_bind_block (inner_bind))
11713 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
11716 if (gimple_omp_for_combined_into_p (stmt))
11718 omp_extract_for_data (stmt, &fd, NULL);
11719 fdp = &fd;
11721 /* We need two temporaries with fd.loop.v type (istart/iend)
11722 and then (fd.collapse - 1) temporaries with the same
11723 type for count2 ... countN-1 vars if not constant. */
11724 size_t count = 2;
11725 tree type = fd.iter_type;
11726 if (fd.collapse > 1
11727 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11728 count += fd.collapse - 1;
11729 size_t count2 = 0;
11730 tree type2 = NULL_TREE;
11731 bool taskreg_for
11732 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
11733 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
11734 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
11735 tree simtc = NULL;
11736 tree clauses = *pc;
11737 if (fd.collapse > 1
11738 && fd.non_rect
11739 && fd.last_nonrect == fd.first_nonrect + 1
11740 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11741 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
11742 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
11744 v = gimple_omp_for_index (stmt, fd.first_nonrect);
11745 type2 = TREE_TYPE (v);
11746 count++;
11747 count2 = 3;
11749 if (taskreg_for)
11750 outerc
11751 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
11752 OMP_CLAUSE__LOOPTEMP_);
11753 if (ctx->simt_stmt)
11754 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
11755 OMP_CLAUSE__LOOPTEMP_);
11756 for (i = 0; i < count + count2; i++)
11758 tree temp;
11759 if (taskreg_for)
11761 gcc_assert (outerc);
11762 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
11763 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
11764 OMP_CLAUSE__LOOPTEMP_);
11766 else
11768 /* If there are 2 adjacent SIMD stmts, one with _simt_
11769 clause, another without, make sure they have the same
11770 decls in _looptemp_ clauses, because the outer stmt
11771 they are combined into will look up just one inner_stmt. */
11772 if (ctx->simt_stmt)
11773 temp = OMP_CLAUSE_DECL (simtc);
11774 else
11775 temp = create_tmp_var (i >= count ? type2 : type);
11776 insert_decl_map (&ctx->outer->cb, temp, temp);
11778 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
11779 OMP_CLAUSE_DECL (*pc) = temp;
11780 pc = &OMP_CLAUSE_CHAIN (*pc);
11781 if (ctx->simt_stmt)
11782 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
11783 OMP_CLAUSE__LOOPTEMP_);
11785 *pc = clauses;
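      /* For example (illustrative), a combined construct with collapse(2)
	 and a non-constant innermost bound gets count = 2 + (2 - 1) = 3
	 _looptemp_ clauses here: istart, iend and one count temporary;
	 the non-rectangular special case handled above adds further
	 temporaries of type2.  */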
11788 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11789 dlist = NULL;
11790 body = NULL;
11791 tree rclauses
11792 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
11793 OMP_CLAUSE_REDUCTION);
11794 tree rtmp = NULL_TREE;
11795 if (rclauses)
11797 tree type = build_pointer_type (pointer_sized_int_node);
11798 tree temp = create_tmp_var (type);
11799 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
11800 OMP_CLAUSE_DECL (c) = temp;
11801 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
11802 gimple_omp_for_set_clauses (stmt, c);
11803 lower_omp_task_reductions (ctx, OMP_FOR,
11804 gimple_omp_for_clauses (stmt),
11805 &tred_ilist, &tred_dlist);
11806 rclauses = c;
11807 rtmp = make_ssa_name (type);
11808 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
11811 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
11812 ctx);
11814 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
11815 fdp);
11816 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
11817 gimple_omp_for_pre_body (stmt));
11819 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11821 gcall *private_marker = NULL;
11822 if (is_gimple_omp_oacc (ctx->stmt)
11823 && !gimple_seq_empty_p (omp_for_body))
11824 private_marker = lower_oacc_private_marker (ctx);
11826 /* Lower the header expressions. At this point, we can assume that
11827 the header is of the form:
11829 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11831 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11832 using the .omp_data_s mapping, if needed. */
11833 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
11835 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
11836 if (TREE_CODE (*rhs_p) == TREE_VEC)
11838 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11839 TREE_VEC_ELT (*rhs_p, 1)
11840 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11841 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11842 TREE_VEC_ELT (*rhs_p, 2)
11843 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11845 else if (!is_gimple_min_invariant (*rhs_p))
11846 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11847 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11848 recompute_tree_invariant_for_addr_expr (*rhs_p);
11850 rhs_p = gimple_omp_for_final_ptr (stmt, i);
11851 if (TREE_CODE (*rhs_p) == TREE_VEC)
11853 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11854 TREE_VEC_ELT (*rhs_p, 1)
11855 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11856 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11857 TREE_VEC_ELT (*rhs_p, 2)
11858 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11860 else if (!is_gimple_min_invariant (*rhs_p))
11861 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11862 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11863 recompute_tree_invariant_for_addr_expr (*rhs_p);
11865 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
11866 if (!is_gimple_min_invariant (*rhs_p))
11867 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11869 if (rclauses)
11870 gimple_seq_add_seq (&tred_ilist, cnt_list);
11871 else
11872 gimple_seq_add_seq (&body, cnt_list);
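  /* Note (descriptive): for non-rectangular loops the initial/final
     expression is a TREE_VEC (outer-var, factor, offset), e.g. a bound
     of the form j = 2 * i + 1; only the factor and offset (elements 1
     and 2) may need to become formal temporaries above.  */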
11874 /* Once lowered, extract the bounds and clauses. */
11875 omp_extract_for_data (stmt, &fd, NULL);
11877 if (is_gimple_omp_oacc (ctx->stmt)
11878 && !ctx_in_oacc_kernels_region (ctx))
11879 lower_oacc_head_tail (gimple_location (stmt),
11880 gimple_omp_for_clauses (stmt), private_marker,
11881 &oacc_head, &oacc_tail, ctx);
11883 /* Add OpenACC partitioning and reduction markers just before the loop. */
11884 if (oacc_head)
11885 gimple_seq_add_seq (&body, oacc_head);
11887 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
11889 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11890 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11891 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11892 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11894 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
11895 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
11896 OMP_CLAUSE_LINEAR_STEP (c)
11897 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
11898 ctx);
11901 if ((ctx->scan_inclusive || ctx->scan_exclusive)
11902 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11903 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
11904 else
11906 gimple_seq_add_stmt (&body, stmt);
11907 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
11910 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
11911 fd.loop.v));
11913 /* After the loop, add exit clauses. */
11914 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
11916 if (clist)
11918 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
11919 gcall *g = gimple_build_call (fndecl, 0);
11920 gimple_seq_add_stmt (&body, g);
11921 gimple_seq_add_seq (&body, clist);
11922 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
11923 g = gimple_build_call (fndecl, 0);
11924 gimple_seq_add_stmt (&body, g);
11927 if (ctx->cancellable)
11928 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
11930 gimple_seq_add_seq (&body, dlist);
11932 if (rclauses)
11934 gimple_seq_add_seq (&tred_ilist, body);
11935 body = tred_ilist;
11938 body = maybe_catch_exception (body);
11940 /* Region exit marker goes at the end of the loop body. */
11941 gimple *g = gimple_build_omp_return (fd.have_nowait);
11942 gimple_seq_add_stmt (&body, g);
11944 gimple_seq_add_seq (&body, tred_dlist);
11946 maybe_add_implicit_barrier_cancel (ctx, g, &body);
11948 if (rclauses)
11949 OMP_CLAUSE_DECL (rclauses) = rtmp;
11951 /* Add OpenACC joining and reduction markers just after the loop. */
11952 if (oacc_tail)
11953 gimple_seq_add_seq (&body, oacc_tail);
11955 pop_gimplify_context (new_stmt);
11957 gimple_bind_append_vars (new_stmt, ctx->block_vars);
11958 maybe_remove_omp_member_access_dummy_vars (new_stmt);
11959 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
11960 if (BLOCK_VARS (block))
11961 TREE_USED (block) = 1;
11963 gimple_bind_set_body (new_stmt, body);
11964 gimple_omp_set_body (stmt, NULL);
11965 gimple_omp_for_set_pre_body (stmt, NULL);
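/* The lowered loop therefore ends up, roughly, as a GIMPLE_BIND of the
   shape (a sketch, not an exact listing):
     { data-sharing clause init; pre-body; GIMPLE_OMP_FOR; loop body;
       GIMPLE_OMP_CONTINUE; reduction/lastprivate merging; destructor
       list; GIMPLE_OMP_RETURN; task-reduction cleanup; oacc_tail }
   with scan loops instead split in two by lower_omp_for_scan.  */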
11968 /* Callback for walk_stmts.  Check whether the statement sequence consists of
11969    exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS (ignoring debug stmts).  */
11971 static tree
11972 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11973 bool *handled_ops_p,
11974 struct walk_stmt_info *wi)
11976 int *info = (int *) wi->info;
11977 gimple *stmt = gsi_stmt (*gsi_p);
11979 *handled_ops_p = true;
11980 switch (gimple_code (stmt))
11982 WALK_SUBSTMTS;
11984 case GIMPLE_DEBUG:
11985 break;
11986 case GIMPLE_OMP_FOR:
11987 case GIMPLE_OMP_SECTIONS:
11988 *info = *info == 0 ? 1 : -1;
11989 break;
11990 default:
11991 *info = -1;
11992 break;
11994 return NULL;
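/* E.g. (illustrative)
     #pragma omp parallel
     {
       #pragma omp for
       for (...) ...
     }
   leaves *info == 1, and lower_omp_taskreg below then marks the
   parallel as combined; a second worksharing construct or any other
   non-debug statement forces *info to -1.  */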
11997 struct omp_taskcopy_context
11999 /* This field must be at the beginning, as we do "inheritance": Some
12000 callback functions for tree-inline.cc (e.g., omp_copy_decl)
12001 receive a copy_body_data pointer that is up-casted to an
12002 omp_context pointer. */
12003 copy_body_data cb;
12004 omp_context *ctx;
12007 static tree
12008 task_copyfn_copy_decl (tree var, copy_body_data *cb)
12010 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
12012 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
12013 return create_tmp_var (TREE_TYPE (var));
12015 return var;
12018 static tree
12019 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
12021 tree name, new_fields = NULL, type, f;
12023 type = lang_hooks.types.make_type (RECORD_TYPE);
12024 name = DECL_NAME (TYPE_NAME (orig_type));
12025 name = build_decl (gimple_location (tcctx->ctx->stmt),
12026 TYPE_DECL, name, type);
12027 TYPE_NAME (type) = name;
12029 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
12031 tree new_f = copy_node (f);
12032 DECL_CONTEXT (new_f) = type;
12033 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
12034 TREE_CHAIN (new_f) = new_fields;
12035 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
12036 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
12037 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
12038 &tcctx->cb, NULL);
12039 new_fields = new_f;
12040 tcctx->cb.decl_map->put (f, new_f);
12042 TYPE_FIELDS (type) = nreverse (new_fields);
12043 layout_type (type);
12044 return type;
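/* Remapping is needed when field types are variably modified, e.g.
   (illustrative):
     void foo (int n)
     {
       int vla[n];
       #pragma omp task firstprivate (vla)
       use (vla);
     }
   where DECL_SIZE of the vla field refers to temporaries of the parent
   function, which task_copyfn_copy_decl above replaces with fresh ones
   in the copy function.  */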
12047 /* Create task copyfn. */
12049 static void
12050 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
12052 struct function *child_cfun;
12053 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
12054 tree record_type, srecord_type, bind, list;
12055 bool record_needs_remap = false, srecord_needs_remap = false;
12056 splay_tree_node n;
12057 struct omp_taskcopy_context tcctx;
12058 location_t loc = gimple_location (task_stmt);
12059 size_t looptempno = 0;
12061 child_fn = gimple_omp_task_copy_fn (task_stmt);
12062 task_cpyfns.safe_push (task_stmt);
12063 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
12064 gcc_assert (child_cfun->cfg == NULL);
12065 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
12067 /* Reset DECL_CONTEXT on function arguments. */
12068 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
12069 DECL_CONTEXT (t) = child_fn;
12071 /* Populate the function. */
12072 push_gimplify_context ();
12073 push_cfun (child_cfun);
12075 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
12076 TREE_SIDE_EFFECTS (bind) = 1;
12077 list = NULL;
12078 DECL_SAVED_TREE (child_fn) = bind;
12079 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
12081 /* Remap src and dst argument types if needed. */
12082 record_type = ctx->record_type;
12083 srecord_type = ctx->srecord_type;
12084 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
12085 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
12087 record_needs_remap = true;
12088 break;
12090 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
12091 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
12093 srecord_needs_remap = true;
12094 break;
12097 if (record_needs_remap || srecord_needs_remap)
12099 memset (&tcctx, '\0', sizeof (tcctx));
12100 tcctx.cb.src_fn = ctx->cb.src_fn;
12101 tcctx.cb.dst_fn = child_fn;
12102 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
12103 gcc_checking_assert (tcctx.cb.src_node);
12104 tcctx.cb.dst_node = tcctx.cb.src_node;
12105 tcctx.cb.src_cfun = ctx->cb.src_cfun;
12106 tcctx.cb.copy_decl = task_copyfn_copy_decl;
12107 tcctx.cb.eh_lp_nr = 0;
12108 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
12109 tcctx.cb.decl_map = new hash_map<tree, tree>;
12110 tcctx.ctx = ctx;
12112 if (record_needs_remap)
12113 record_type = task_copyfn_remap_type (&tcctx, record_type);
12114 if (srecord_needs_remap)
12115 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
12117 else
12118 tcctx.cb.decl_map = NULL;
12120 arg = DECL_ARGUMENTS (child_fn);
12121 TREE_TYPE (arg) = build_pointer_type (record_type);
12122 sarg = DECL_CHAIN (arg);
12123 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
12125 /* First pass: initialize temporaries used in record_type and srecord_type
12126 sizes and field offsets. */
12127 if (tcctx.cb.decl_map)
12128 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12129 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12131 tree *p;
12133 decl = OMP_CLAUSE_DECL (c);
12134 p = tcctx.cb.decl_map->get (decl);
12135 if (p == NULL)
12136 continue;
12137 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12138 sf = (tree) n->value;
12139 sf = *tcctx.cb.decl_map->get (sf);
12140 src = build_simple_mem_ref_loc (loc, sarg);
12141 src = omp_build_component_ref (src, sf);
12142 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
12143 append_to_statement_list (t, &list);
12146   /* Second pass: copy shared var pointers and copy-construct non-VLA
12147 firstprivate vars. */
12148 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12149 switch (OMP_CLAUSE_CODE (c))
12151 splay_tree_key key;
12152 case OMP_CLAUSE_SHARED:
12153 decl = OMP_CLAUSE_DECL (c);
12154 key = (splay_tree_key) decl;
12155 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
12156 key = (splay_tree_key) &DECL_UID (decl);
12157 n = splay_tree_lookup (ctx->field_map, key);
12158 if (n == NULL)
12159 break;
12160 f = (tree) n->value;
12161 if (tcctx.cb.decl_map)
12162 f = *tcctx.cb.decl_map->get (f);
12163 n = splay_tree_lookup (ctx->sfield_map, key);
12164 sf = (tree) n->value;
12165 if (tcctx.cb.decl_map)
12166 sf = *tcctx.cb.decl_map->get (sf);
12167 src = build_simple_mem_ref_loc (loc, sarg);
12168 src = omp_build_component_ref (src, sf);
12169 dst = build_simple_mem_ref_loc (loc, arg);
12170 dst = omp_build_component_ref (dst, f);
12171 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12172 append_to_statement_list (t, &list);
12173 break;
12174 case OMP_CLAUSE_REDUCTION:
12175 case OMP_CLAUSE_IN_REDUCTION:
12176 decl = OMP_CLAUSE_DECL (c);
12177 if (TREE_CODE (decl) == MEM_REF)
12179 decl = TREE_OPERAND (decl, 0);
12180 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12181 decl = TREE_OPERAND (decl, 0);
12182 if (TREE_CODE (decl) == INDIRECT_REF
12183 || TREE_CODE (decl) == ADDR_EXPR)
12184 decl = TREE_OPERAND (decl, 0);
12186 key = (splay_tree_key) decl;
12187 n = splay_tree_lookup (ctx->field_map, key);
12188 if (n == NULL)
12189 break;
12190 f = (tree) n->value;
12191 if (tcctx.cb.decl_map)
12192 f = *tcctx.cb.decl_map->get (f);
12193 n = splay_tree_lookup (ctx->sfield_map, key);
12194 sf = (tree) n->value;
12195 if (tcctx.cb.decl_map)
12196 sf = *tcctx.cb.decl_map->get (sf);
12197 src = build_simple_mem_ref_loc (loc, sarg);
12198 src = omp_build_component_ref (src, sf);
12199 if (decl != OMP_CLAUSE_DECL (c)
12200 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12201 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12202 src = build_simple_mem_ref_loc (loc, src);
12203 dst = build_simple_mem_ref_loc (loc, arg);
12204 dst = omp_build_component_ref (dst, f);
12205 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12206 append_to_statement_list (t, &list);
12207 break;
12208 case OMP_CLAUSE__LOOPTEMP_:
12209 	/* Fields for the first two _looptemp_ clauses are initialized by
12210 	   GOMP_taskloop*; the rest are handled like firstprivate.  */
12211 if (looptempno < 2)
12213 looptempno++;
12214 break;
12216 /* FALLTHRU */
12217 case OMP_CLAUSE__REDUCTEMP_:
12218 case OMP_CLAUSE_FIRSTPRIVATE:
12219 decl = OMP_CLAUSE_DECL (c);
12220 if (is_variable_sized (decl))
12221 break;
12222 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12223 if (n == NULL)
12224 break;
12225 f = (tree) n->value;
12226 if (tcctx.cb.decl_map)
12227 f = *tcctx.cb.decl_map->get (f);
12228 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12229 if (n != NULL)
12231 sf = (tree) n->value;
12232 if (tcctx.cb.decl_map)
12233 sf = *tcctx.cb.decl_map->get (sf);
12234 src = build_simple_mem_ref_loc (loc, sarg);
12235 src = omp_build_component_ref (src, sf);
12236 if (use_pointer_for_field (decl, NULL)
12237 || omp_privatize_by_reference (decl))
12238 src = build_simple_mem_ref_loc (loc, src);
12240 else
12241 src = decl;
12242 dst = build_simple_mem_ref_loc (loc, arg);
12243 dst = omp_build_component_ref (dst, f);
12244 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
12245 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12246 else
12248 if (ctx->allocate_map)
12249 if (tree *allocatorp = ctx->allocate_map->get (decl))
12251 tree allocator = *allocatorp;
12252 HOST_WIDE_INT ialign = 0;
12253 if (TREE_CODE (allocator) == TREE_LIST)
12255 ialign = tree_to_uhwi (TREE_VALUE (allocator));
12256 allocator = TREE_PURPOSE (allocator);
12258 if (TREE_CODE (allocator) != INTEGER_CST)
12260 n = splay_tree_lookup (ctx->sfield_map,
12261 (splay_tree_key) allocator);
12262 allocator = (tree) n->value;
12263 if (tcctx.cb.decl_map)
12264 allocator = *tcctx.cb.decl_map->get (allocator);
12265 tree a = build_simple_mem_ref_loc (loc, sarg);
12266 allocator = omp_build_component_ref (a, allocator);
12268 allocator = fold_convert (pointer_sized_int_node, allocator);
12269 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
12270 tree align = build_int_cst (size_type_node,
12271 MAX (ialign,
12272 DECL_ALIGN_UNIT (decl)));
12273 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
12274 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
12275 allocator);
12276 ptr = fold_convert (TREE_TYPE (dst), ptr);
12277 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
12278 append_to_statement_list (t, &list);
12279 dst = build_simple_mem_ref_loc (loc, dst);
12281 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12283 append_to_statement_list (t, &list);
12284 break;
12285 case OMP_CLAUSE_PRIVATE:
12286 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
12287 break;
12288 decl = OMP_CLAUSE_DECL (c);
12289 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12290 f = (tree) n->value;
12291 if (tcctx.cb.decl_map)
12292 f = *tcctx.cb.decl_map->get (f);
12293 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12294 if (n != NULL)
12296 sf = (tree) n->value;
12297 if (tcctx.cb.decl_map)
12298 sf = *tcctx.cb.decl_map->get (sf);
12299 src = build_simple_mem_ref_loc (loc, sarg);
12300 src = omp_build_component_ref (src, sf);
12301 if (use_pointer_for_field (decl, NULL))
12302 src = build_simple_mem_ref_loc (loc, src);
12304 else
12305 src = decl;
12306 dst = build_simple_mem_ref_loc (loc, arg);
12307 dst = omp_build_component_ref (dst, f);
12308 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12309 append_to_statement_list (t, &list);
12310 break;
12311 default:
12312 break;
12315 /* Last pass: handle VLA firstprivates. */
12316 if (tcctx.cb.decl_map)
12317 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12318 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12320 tree ind, ptr, df;
12322 decl = OMP_CLAUSE_DECL (c);
12323 if (!is_variable_sized (decl))
12324 continue;
12325 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12326 if (n == NULL)
12327 continue;
12328 f = (tree) n->value;
12329 f = *tcctx.cb.decl_map->get (f);
12330 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
12331 ind = DECL_VALUE_EXPR (decl);
12332 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
12333 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
12334 n = splay_tree_lookup (ctx->sfield_map,
12335 (splay_tree_key) TREE_OPERAND (ind, 0));
12336 sf = (tree) n->value;
12337 sf = *tcctx.cb.decl_map->get (sf);
12338 src = build_simple_mem_ref_loc (loc, sarg);
12339 src = omp_build_component_ref (src, sf);
12340 src = build_simple_mem_ref_loc (loc, src);
12341 dst = build_simple_mem_ref_loc (loc, arg);
12342 dst = omp_build_component_ref (dst, f);
12343 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12344 append_to_statement_list (t, &list);
12345 n = splay_tree_lookup (ctx->field_map,
12346 (splay_tree_key) TREE_OPERAND (ind, 0));
12347 df = (tree) n->value;
12348 df = *tcctx.cb.decl_map->get (df);
12349 ptr = build_simple_mem_ref_loc (loc, arg);
12350 ptr = omp_build_component_ref (ptr, df);
12351 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
12352 build_fold_addr_expr_loc (loc, dst));
12353 append_to_statement_list (t, &list);
12356 t = build1 (RETURN_EXPR, void_type_node, NULL);
12357 append_to_statement_list (t, &list);
12359 if (tcctx.cb.decl_map)
12360 delete tcctx.cb.decl_map;
12361 pop_gimplify_context (NULL);
12362 BIND_EXPR_BODY (bind) = list;
12363 pop_cfun ();
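/* The generated copy function thus has roughly this shape
   (illustrative only; the struct and field names are made up):

     void task_copyfn (struct omp_data_s *dst, struct omp_data_s1 *src)
     {
       // First pass: read back temporaries used in VLA sizes/offsets.
       sz = src->sz;
       // Second pass: shared pointers and non-VLA firstprivate copies.
       dst->shared_p = src->shared_p;
       dst->fp = src->fp;	// or the copy ctor for C++ classes
       // Last pass: copy VLA firstprivates and store their addresses.
       ...
     }  */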
12366 static void
12367 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
12369 tree c, clauses;
12370 gimple *g;
12371 size_t cnt[5] = { 0, 0, 0, 0, 0 }, idx = 2, i;
12373 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
12374 gcc_assert (clauses);
12375 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12376 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
12377 switch (OMP_CLAUSE_DEPEND_KIND (c))
12379 case OMP_CLAUSE_DEPEND_LAST:
12380 /* Lowering already done at gimplification. */
12381 return;
12382 case OMP_CLAUSE_DEPEND_IN:
12383 cnt[2]++;
12384 break;
12385 case OMP_CLAUSE_DEPEND_OUT:
12386 case OMP_CLAUSE_DEPEND_INOUT:
12387 cnt[0]++;
12388 break;
12389 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12390 cnt[1]++;
12391 break;
12392 case OMP_CLAUSE_DEPEND_DEPOBJ:
12393 cnt[3]++;
12394 break;
12395 case OMP_CLAUSE_DEPEND_INOUTSET:
12396 cnt[4]++;
12397 break;
12398 default:
12399 gcc_unreachable ();
12401 if (cnt[1] || cnt[3] || cnt[4])
12402 idx = 5;
12403 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3] + cnt[4];
12404 size_t inoutidx = total + idx;
12405 tree type = build_array_type_nelts (ptr_type_node, total + idx + 2 * cnt[4]);
12406 tree array = create_tmp_var (type);
12407 TREE_ADDRESSABLE (array) = 1;
12408 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12409 NULL_TREE);
12410 if (idx == 5)
12412 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12413 gimple_seq_add_stmt (iseq, g);
12414 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12415 NULL_TREE);
12417 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12418 gimple_seq_add_stmt (iseq, g);
12419 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12421 r = build4 (ARRAY_REF, ptr_type_node, array,
12422 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12423 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12424 gimple_seq_add_stmt (iseq, g);
12426 for (i = 0; i < 5; i++)
12428 if (cnt[i] == 0)
12429 continue;
12430 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12431 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12432 continue;
12433 else
12435 switch (OMP_CLAUSE_DEPEND_KIND (c))
12437 case OMP_CLAUSE_DEPEND_IN:
12438 if (i != 2)
12439 continue;
12440 break;
12441 case OMP_CLAUSE_DEPEND_OUT:
12442 case OMP_CLAUSE_DEPEND_INOUT:
12443 if (i != 0)
12444 continue;
12445 break;
12446 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12447 if (i != 1)
12448 continue;
12449 break;
12450 case OMP_CLAUSE_DEPEND_DEPOBJ:
12451 if (i != 3)
12452 continue;
12453 break;
12454 case OMP_CLAUSE_DEPEND_INOUTSET:
12455 if (i != 4)
12456 continue;
12457 break;
12458 default:
12459 gcc_unreachable ();
12461 tree t = OMP_CLAUSE_DECL (c);
12462 if (i == 4)
12464 t = build4 (ARRAY_REF, ptr_type_node, array,
12465 size_int (inoutidx), NULL_TREE, NULL_TREE);
12466 t = build_fold_addr_expr (t);
12467 inoutidx += 2;
12469 t = fold_convert (ptr_type_node, t);
12470 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12471 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12472 NULL_TREE, NULL_TREE);
12473 g = gimple_build_assign (r, t);
12474 gimple_seq_add_stmt (iseq, g);
12477 if (cnt[4])
12478 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12479 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12480 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_INOUTSET)
12482 tree t = OMP_CLAUSE_DECL (c);
12483 t = fold_convert (ptr_type_node, t);
12484 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12485 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12486 NULL_TREE, NULL_TREE);
12487 g = gimple_build_assign (r, t);
12488 gimple_seq_add_stmt (iseq, g);
12489 t = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
12490 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12491 NULL_TREE, NULL_TREE);
12492 g = gimple_build_assign (r, t);
12493 gimple_seq_add_stmt (iseq, g);
12496 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12497 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12498 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12499 OMP_CLAUSE_CHAIN (c) = *pclauses;
12500 *pclauses = c;
12501 tree clobber = build_clobber (type);
12502 g = gimple_build_assign (array, clobber);
12503 gimple_seq_add_stmt (oseq, g);
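  /* For example (illustrative), for
       #pragma omp task depend(out: a) depend(in: b, c)
     only plain in/out/inout kinds appear, so idx == 2 and the array is
       { (void *) 3, (void *) 1, &a, &b, &c }
     i.e. the total count, the out/inout count, then the addresses.
     When any mutexinoutset, depobj or inoutset clause is present,
     idx == 5 and the header instead is
       { 0, total, #out/inout, #mutexinoutset, #in, addresses... }
     with the depobj and inoutset entries following.  */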
/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  */

static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      par_bind = NULL;
      par_body = NULL;
    }
  else
    {
      par_bind
        = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
      par_body = gimple_bind_body (par_bind);
    }
  child_fn = ctx->cb.dst_fn;
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
        gimple_omp_parallel_set_combined_p (stmt, true);
    }
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
                            &dep_ilist, &dep_olist);
    }

  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      if (dep_bind)
        {
          gsi_replace (gsi_p, dep_bind, true);
          gimple_bind_add_seq (dep_bind, dep_ilist);
          gimple_bind_add_stmt (dep_bind, stmt);
          gimple_bind_add_seq (dep_bind, dep_olist);
          pop_gimplify_context (dep_bind);
        }
      return;
    }

  if (ctx->srecord_type)
    create_task_copyfn (as_a <gomp_task *> (stmt), ctx);

  gimple_seq tskred_ilist = NULL;
  gimple_seq tskred_olist = NULL;
  if ((is_task_ctx (ctx)
       && gimple_omp_task_taskloop_p (ctx->stmt)
       && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
                           OMP_CLAUSE_REDUCTION))
      || (is_parallel_ctx (ctx)
          && omp_find_clause (gimple_omp_parallel_clauses (stmt),
                              OMP_CLAUSE__REDUCTEMP_)))
    {
      if (dep_bind == NULL)
        {
          push_gimplify_context ();
          dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
        }
      lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
                                                        : OMP_PARALLEL,
                                 gimple_omp_taskreg_clauses (ctx->stmt),
                                 &tskred_ilist, &tskred_olist);
    }

  push_gimplify_context ();

  gimple_seq par_olist = NULL;
  gimple_seq par_ilist = NULL;
  gimple_seq par_rlist = NULL;
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  if (gimple_code (stmt) != GIMPLE_OMP_TASK)
    lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  maybe_remove_omp_member_access_dummy_vars (par_bind);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      ctx->sender_decl
        = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
                                            : ctx->record_type,
                          ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  gimple_seq olist = NULL;
  gimple_seq ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
                                                        clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  gimple_seq new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
                           gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
                         gimple_build_omp_continue (integer_zero_node,
                                                    integer_zero_node));
  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
  gimple_omp_set_body (stmt, new_body);

  if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
    bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
  else
    bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_seq (dep_bind, tskred_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, tskred_olist);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
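
/* Illustrative sketch, not from the original sources: for

     int y = 42;
     #pragma omp parallel firstprivate(y)
     use (y);

   the -fdump-tree-omplower form is approximately

     .omp_data_o.1.y = y;
     #pragma omp parallel [child fn: foo._omp_fn.0 (.omp_data_o.1)]
       {
         .omp_data_i = &.omp_data_o.1;
         use (.omp_data_i->y);
         #pragma omp return
       }
     .omp_data_o.1 = {CLOBBER};

   i.e. the sender record .omp_data_o is filled from the send clauses,
   the receiver decl is initialized from its address at the top of the
   body, and the record is clobbered once the region is done.  The names
   .omp_data_o.1 and foo._omp_fn.0 are made-up dump temporaries.  */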
/* Lower the GIMPLE_OMP_TARGET in the current statement
   in GSI_P.  CTX holds context information for the directive.  */

static void
lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t, c;
  gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
  gbind *tgt_bind, *bind, *dep_bind = NULL;
  gimple_seq tgt_body, olist, ilist, fplist, new_body;
  location_t loc = gimple_location (stmt);
  bool offloaded, data_region;
  unsigned int map_cnt = 0;
  tree in_reduction_clauses = NULL_TREE;

  offloaded = is_gimple_omp_offloaded (stmt);
  switch (gimple_omp_target_kind (stmt))
    {
    case GF_OMP_TARGET_KIND_REGION:
      tree *p, *q;
      q = &in_reduction_clauses;
      for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
        if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
          {
            *q = *p;
            q = &OMP_CLAUSE_CHAIN (*q);
            *p = OMP_CLAUSE_CHAIN (*p);
          }
        else
          p = &OMP_CLAUSE_CHAIN (*p);
      *q = NULL_TREE;
      *p = in_reduction_clauses;
      /* FALLTHRU */
    case GF_OMP_TARGET_KIND_UPDATE:
    case GF_OMP_TARGET_KIND_ENTER_DATA:
    case GF_OMP_TARGET_KIND_EXIT_DATA:
    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
    case GF_OMP_TARGET_KIND_OACC_KERNELS:
    case GF_OMP_TARGET_KIND_OACC_SERIAL:
    case GF_OMP_TARGET_KIND_OACC_UPDATE:
    case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
    case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
    case GF_OMP_TARGET_KIND_OACC_DECLARE:
    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
      data_region = false;
      break;
    case GF_OMP_TARGET_KIND_DATA:
    case GF_OMP_TARGET_KIND_OACC_DATA:
    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
      data_region = true;
      break;
    default:
      gcc_unreachable ();
    }

  /* Ensure that requires map is written via output_offload_tables, even if
     only 'target (enter/exit) data' is used in the translation unit.  */
  if (ENABLE_OFFLOADING && (omp_requires_mask & OMP_REQUIRES_TARGET_USED))
    g->have_offload = true;
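
  /* Illustrative note, not from the original sources: a translation unit
     such as

       #pragma omp requires unified_shared_memory
       void f (int *p)
       {
       #pragma omp target enter data map(to: p[:8])
       }

     creates no offloaded function that would set have_offload on its own,
     yet the requires mask must still reach the offload tables so the
     runtime can diagnose inconsistent 'requires' directives across TUs.  */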
  clauses = gimple_omp_target_clauses (stmt);

  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
  if (has_depend || in_reduction_clauses)
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      if (has_depend)
        lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
                              &dep_ilist, &dep_olist);
      if (in_reduction_clauses)
        lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
                                 ctx, NULL);
    }

  tgt_bind = NULL;
  tgt_body = NULL;
  if (offloaded)
    {
      tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
      tgt_body = gimple_bind_body (tgt_bind);
    }
  else if (data_region)
    tgt_body = gimple_omp_body (stmt);
  child_fn = ctx->cb.dst_fn;

  push_gimplify_context ();
  fplist = NULL;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
        tree var, x;

      default:
        break;
      case OMP_CLAUSE_MAP:
#if CHECKING_P
        /* First check what we're prepared to handle in the following.  */
        switch (OMP_CLAUSE_MAP_KIND (c))
          {
          case GOMP_MAP_ALLOC:
          case GOMP_MAP_TO:
          case GOMP_MAP_FROM:
          case GOMP_MAP_TOFROM:
          case GOMP_MAP_POINTER:
          case GOMP_MAP_TO_PSET:
          case GOMP_MAP_DELETE:
          case GOMP_MAP_RELEASE:
          case GOMP_MAP_ALWAYS_TO:
          case GOMP_MAP_ALWAYS_FROM:
          case GOMP_MAP_ALWAYS_TOFROM:
          case GOMP_MAP_FIRSTPRIVATE_POINTER:
          case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
          case GOMP_MAP_STRUCT:
          case GOMP_MAP_ALWAYS_POINTER:
          case GOMP_MAP_ATTACH:
          case GOMP_MAP_DETACH:
          case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
          case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
            break;
          case GOMP_MAP_IF_PRESENT:
          case GOMP_MAP_FORCE_ALLOC:
          case GOMP_MAP_FORCE_TO:
          case GOMP_MAP_FORCE_FROM:
          case GOMP_MAP_FORCE_TOFROM:
          case GOMP_MAP_FORCE_PRESENT:
          case GOMP_MAP_FORCE_DEVICEPTR:
          case GOMP_MAP_DEVICE_RESIDENT:
          case GOMP_MAP_LINK:
          case GOMP_MAP_FORCE_DETACH:
            gcc_assert (is_gimple_omp_oacc (stmt));
            break;
          default:
            gcc_unreachable ();
          }
#endif
        /* FALLTHRU */
      case OMP_CLAUSE_TO:
      case OMP_CLAUSE_FROM:
      oacc_firstprivate:
        var = OMP_CLAUSE_DECL (c);
        if (!DECL_P (var))
          {
            if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
                || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
                    && (OMP_CLAUSE_MAP_KIND (c)
                        != GOMP_MAP_FIRSTPRIVATE_POINTER)))
              map_cnt++;
            continue;
          }

        if (DECL_SIZE (var)
            && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
          {
            tree var2 = DECL_VALUE_EXPR (var);
            gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
            var2 = TREE_OPERAND (var2, 0);
            gcc_assert (DECL_P (var2));
            var = var2;
          }

        if (offloaded
            && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
            && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
                || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
          {
            if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
              {
                if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
                    && varpool_node::get_create (var)->offloadable)
                  continue;

                tree type = build_pointer_type (TREE_TYPE (var));
                tree new_var = lookup_decl (var, ctx);
                x = create_tmp_var_raw (type, get_name (new_var));
                gimple_add_tmp_var (x);
                x = build_simple_mem_ref (x);
                SET_DECL_VALUE_EXPR (new_var, x);
                DECL_HAS_VALUE_EXPR_P (new_var) = 1;
              }
            continue;
          }

        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
            && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
                || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
            && is_omp_target (stmt))
          {
            gcc_assert (maybe_lookup_field (c, ctx));
            map_cnt++;
            continue;
          }

        if (!maybe_lookup_field (var, ctx))
          continue;

        /* Don't remap compute constructs' reduction variables, because the
           intermediate result must be local to each gang.  */
        if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                           && is_gimple_omp_oacc (ctx->stmt)
                           && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
          {
            x = build_receiver_ref (var, true, ctx);
            tree new_var = lookup_decl (var, ctx);

            if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
                && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
              x = build_simple_mem_ref (x);
            if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
              {
                gcc_assert (is_gimple_omp_oacc (ctx->stmt));
                if (omp_privatize_by_reference (new_var)
                    && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
                        || DECL_BY_REFERENCE (var)))
                  {
                    /* Create a local object to hold the instance
                       value.  */
                    tree type = TREE_TYPE (TREE_TYPE (new_var));
                    const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
                    tree inst = create_tmp_var (type, id);
                    gimplify_assign (inst, fold_indirect_ref (x), &fplist);
                    x = build_fold_addr_expr (inst);
                  }
                gimplify_assign (new_var, x, &fplist);
              }
            else if (DECL_P (new_var))
              {
                SET_DECL_VALUE_EXPR (new_var, x);
                DECL_HAS_VALUE_EXPR_P (new_var) = 1;
              }
            else
              gcc_unreachable ();
          }
        map_cnt++;
        break;

      case OMP_CLAUSE_FIRSTPRIVATE:
      omp_firstprivate_recv:
        gcc_checking_assert (offloaded);
        if (is_gimple_omp_oacc (ctx->stmt))
          {
            /* No 'firstprivate' clauses on OpenACC 'kernels'.  */
            gcc_checking_assert (!is_oacc_kernels (ctx));
            /* Likewise, on OpenACC 'kernels' decomposed parts.  */
            gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

            goto oacc_firstprivate;
          }
        map_cnt++;
        var = OMP_CLAUSE_DECL (c);
        if (!omp_privatize_by_reference (var)
            && !is_gimple_reg_type (TREE_TYPE (var)))
          {
            tree new_var = lookup_decl (var, ctx);
            if (is_variable_sized (var))
              {
                tree pvar = DECL_VALUE_EXPR (var);
                gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
                pvar = TREE_OPERAND (pvar, 0);
                gcc_assert (DECL_P (pvar));
                tree new_pvar = lookup_decl (pvar, ctx);
                x = build_fold_indirect_ref (new_pvar);
                TREE_THIS_NOTRAP (x) = 1;
              }
            else
              x = build_receiver_ref (var, true, ctx);
            SET_DECL_VALUE_EXPR (new_var, x);
            DECL_HAS_VALUE_EXPR_P (new_var) = 1;
          }
        /* Fortran array descriptors: firstprivate of data + attach.  */
        if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
            && lang_hooks.decls.omp_array_data (var, true))
          map_cnt += 2;
        break;

      case OMP_CLAUSE_PRIVATE:
        gcc_checking_assert (offloaded);
        if (is_gimple_omp_oacc (ctx->stmt))
          {
            /* No 'private' clauses on OpenACC 'kernels'.  */
            gcc_checking_assert (!is_oacc_kernels (ctx));
            /* Likewise, on OpenACC 'kernels' decomposed parts.  */
            gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

            break;
          }
        var = OMP_CLAUSE_DECL (c);
        if (is_variable_sized (var))
          {
            tree new_var = lookup_decl (var, ctx);
            tree pvar = DECL_VALUE_EXPR (var);
            gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
            pvar = TREE_OPERAND (pvar, 0);
            gcc_assert (DECL_P (pvar));
            tree new_pvar = lookup_decl (pvar, ctx);
            x = build_fold_indirect_ref (new_pvar);
            TREE_THIS_NOTRAP (x) = 1;
            SET_DECL_VALUE_EXPR (new_var, x);
            DECL_HAS_VALUE_EXPR_P (new_var) = 1;
          }
        break;

      case OMP_CLAUSE_USE_DEVICE_PTR:
      case OMP_CLAUSE_USE_DEVICE_ADDR:
      case OMP_CLAUSE_HAS_DEVICE_ADDR:
      case OMP_CLAUSE_IS_DEVICE_PTR:
        var = OMP_CLAUSE_DECL (c);
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
          {
            while (TREE_CODE (var) == INDIRECT_REF
                   || TREE_CODE (var) == ARRAY_REF)
              var = TREE_OPERAND (var, 0);
            if (lang_hooks.decls.omp_array_data (var, true))
              goto omp_firstprivate_recv;
          }
        map_cnt++;
        if (is_variable_sized (var))
          {
            tree new_var = lookup_decl (var, ctx);
            tree pvar = DECL_VALUE_EXPR (var);
            gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
            pvar = TREE_OPERAND (pvar, 0);
            gcc_assert (DECL_P (pvar));
            tree new_pvar = lookup_decl (pvar, ctx);
            x = build_fold_indirect_ref (new_pvar);
            TREE_THIS_NOTRAP (x) = 1;
            SET_DECL_VALUE_EXPR (new_var, x);
            DECL_HAS_VALUE_EXPR_P (new_var) = 1;
          }
        else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
                   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
                  && !omp_privatize_by_reference (var)
                  && !omp_is_allocatable_or_ptr (var)
                  && !lang_hooks.decls.omp_array_data (var, true))
                 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
          {
            tree new_var = lookup_decl (var, ctx);
            tree type = build_pointer_type (TREE_TYPE (var));
            x = create_tmp_var_raw (type, get_name (new_var));
            gimple_add_tmp_var (x);
            x = build_simple_mem_ref (x);
            SET_DECL_VALUE_EXPR (new_var, x);
            DECL_HAS_VALUE_EXPR_P (new_var) = 1;
          }
        else
          {
            tree new_var = lookup_decl (var, ctx);
            x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
            gimple_add_tmp_var (x);
            SET_DECL_VALUE_EXPR (new_var, x);
            DECL_HAS_VALUE_EXPR_P (new_var) = 1;
          }
        break;
      }
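
  /* Illustrative note, not from the original sources: this first scan only
     decides how many slots the sender arrays will need and redirects uses
     inside the region through DECL_VALUE_EXPR.  E.g. for

       #pragma omp target map(tofrom: a) firstprivate(n)

     MAP_CNT ends up as 2: one slot for the mapped 'a' and one for the
     by-value copy of 'n'; the actual sends are emitted by the second scan
     below.  */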
  if (offloaded)
    {
      target_nesting_level++;
      lower_omp (&tgt_body, ctx);
      target_nesting_level--;
    }
  else if (data_region)
    lower_omp (&tgt_body, ctx);

  if (offloaded)
    {
      /* Declare all the variables created by mapping and the variables
         declared in the scope of the target body.  */
      record_vars_into (ctx->block_vars, child_fn);
      maybe_remove_omp_member_access_dummy_vars (tgt_bind);
      record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
    }

  olist = NULL;
  ilist = NULL;
  if (ctx->record_type)
    {
      ctx->sender_decl
        = create_tmp_var (ctx->record_type, ".omp_data_arr");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      t = make_tree_vec (3);
      TREE_VEC_ELT (t, 0) = ctx->sender_decl;
      TREE_VEC_ELT (t, 1)
        = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
                          ".omp_data_sizes");
      DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
      TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
      tree tkind_type = short_unsigned_type_node;
      int talign_shift = 8;
      TREE_VEC_ELT (t, 2)
        = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
                          ".omp_data_kinds");
      DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
      TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
      TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
      gimple_omp_target_set_data_arg (stmt, t);

      vec<constructor_elt, va_gc> *vsize;
      vec<constructor_elt, va_gc> *vkind;
      vec_alloc (vsize, map_cnt);
      vec_alloc (vkind, map_cnt);
      unsigned int map_idx = 0;
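
      /* Illustrative sketch, not from the original sources: for

           #pragma omp target map(tofrom: a[0:n])

         the three sender objects built above end up roughly as

           .omp_data_arr.2 = { &a[0] };
           .omp_data_sizes.3 = { n * sizeof (*a) };
           .omp_data_kinds.4 = { GOMP_MAP_TOFROM | (align_log2 << 8) };

         one slot per mapping in each of the address, size and kind arrays,
         which the libgomp entry point (e.g. GOMP_target_ext) walks in lock
         step.  The numbered temporaries are made-up dump names.  */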
      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
            tree ovar, nc, s, purpose, var, x, type;
            unsigned int talign;

          default:
            break;

          case OMP_CLAUSE_MAP:
          case OMP_CLAUSE_TO:
          case OMP_CLAUSE_FROM:
          oacc_firstprivate_map:
            nc = c;
            ovar = OMP_CLAUSE_DECL (c);
            if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
                    || (OMP_CLAUSE_MAP_KIND (c)
                        == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
              break;
            if (!DECL_P (ovar))
              {
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                    && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
                  {
                    nc = OMP_CLAUSE_CHAIN (c);
                    gcc_checking_assert (OMP_CLAUSE_DECL (nc)
                                         == get_base_address (ovar));
                    ovar = OMP_CLAUSE_DECL (nc);
                  }
                else
                  {
                    tree x = build_sender_ref (ovar, ctx);
                    tree v = ovar;
                    if (in_reduction_clauses
                        && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                        && OMP_CLAUSE_MAP_IN_REDUCTION (c))
                      {
                        v = unshare_expr (v);
                        tree *p = &v;
                        while (handled_component_p (*p)
                               || TREE_CODE (*p) == INDIRECT_REF
                               || TREE_CODE (*p) == ADDR_EXPR
                               || TREE_CODE (*p) == MEM_REF
                               || TREE_CODE (*p) == NON_LVALUE_EXPR)
                          p = &TREE_OPERAND (*p, 0);
                        tree d = *p;
                        if (is_variable_sized (d))
                          {
                            gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
                            d = DECL_VALUE_EXPR (d);
                            gcc_assert (TREE_CODE (d) == INDIRECT_REF);
                            d = TREE_OPERAND (d, 0);
                            gcc_assert (DECL_P (d));
                          }
                        splay_tree_key key
                          = (splay_tree_key) &DECL_CONTEXT (d);
                        tree nd = (tree) splay_tree_lookup (ctx->field_map,
                                                            key)->value;
                        if (d == *p)
                          *p = nd;
                        else
                          *p = build_fold_indirect_ref (nd);
                      }
                    v = build_fold_addr_expr_with_type (v, ptr_type_node);
                    gimplify_assign (x, v, &ilist);
                    nc = NULL_TREE;
                  }
              }
            else
              {
                if (DECL_SIZE (ovar)
                    && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
                  {
                    tree ovar2 = DECL_VALUE_EXPR (ovar);
                    gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
                    ovar2 = TREE_OPERAND (ovar2, 0);
                    gcc_assert (DECL_P (ovar2));
                    ovar = ovar2;
                  }
                if (!maybe_lookup_field (ovar, ctx)
                    && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                         && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
                             || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
                  continue;
              }

            talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
            if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
              talign = DECL_ALIGN_UNIT (ovar);

            var = NULL_TREE;
            if (nc)
              {
                if (in_reduction_clauses
                    && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                    && OMP_CLAUSE_MAP_IN_REDUCTION (c))
                  {
                    tree d = ovar;
                    if (is_variable_sized (d))
                      {
                        gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
                        d = DECL_VALUE_EXPR (d);
                        gcc_assert (TREE_CODE (d) == INDIRECT_REF);
                        d = TREE_OPERAND (d, 0);
                        gcc_assert (DECL_P (d));
                      }
                    splay_tree_key key
                      = (splay_tree_key) &DECL_CONTEXT (d);
                    tree nd = (tree) splay_tree_lookup (ctx->field_map,
                                                        key)->value;
                    if (d == ovar)
                      var = nd;
                    else
                      var = build_fold_indirect_ref (nd);
                  }
                else
                  var = lookup_decl_in_outer_ctx (ovar, ctx);
              }
            if (nc
                && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
                    || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
                && is_omp_target (stmt))
              {
                x = build_sender_ref (c, ctx);
                gimplify_assign (x, build_fold_addr_expr (var), &ilist);
              }
            else if (nc)
              {
                x = build_sender_ref (ovar, ctx);

                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                    && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                    && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
                    && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
                  {
                    gcc_assert (offloaded);
                    tree avar
                      = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
                    mark_addressable (avar);
                    gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
                    talign = DECL_ALIGN_UNIT (avar);
                    avar = build_fold_addr_expr (avar);
                    gimplify_assign (x, avar, &ilist);
                  }
                else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
                  {
                    gcc_assert (is_gimple_omp_oacc (ctx->stmt));
                    if (!omp_privatize_by_reference (var))
                      {
                        if (is_gimple_reg (var)
                            && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
                          suppress_warning (var);
                        var = build_fold_addr_expr (var);
                      }
                    else
                      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
                    gimplify_assign (x, var, &ilist);
                  }
                else if (is_gimple_reg (var))
                  {
                    gcc_assert (offloaded);
                    tree avar = create_tmp_var (TREE_TYPE (var));
                    mark_addressable (avar);
                    enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
                    if (GOMP_MAP_COPY_TO_P (map_kind)
                        || map_kind == GOMP_MAP_POINTER
                        || map_kind == GOMP_MAP_TO_PSET
                        || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
                      {
                        /* If we need to initialize a temporary
                           with VAR because it is not addressable, and
                           the variable hasn't been initialized yet, then
                           we'll get a warning for the store to avar.
                           Don't warn in that case, the mapping might
                           be implicit.  */
                        suppress_warning (var, OPT_Wuninitialized);
                        gimplify_assign (avar, var, &ilist);
                      }
                    avar = build_fold_addr_expr (avar);
                    gimplify_assign (x, avar, &ilist);
                    if ((GOMP_MAP_COPY_FROM_P (map_kind)
                         || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
                        && !TYPE_READONLY (TREE_TYPE (var)))
                      {
                        x = unshare_expr (x);
                        x = build_simple_mem_ref (x);
                        gimplify_assign (var, x, &olist);
                      }
                  }
                else
                  {
                    /* While MAP is handled explicitly by the FE,
                       for 'target update' only the identified variable
                       is passed.  */
                    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
                         || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
                        && (omp_is_allocatable_or_ptr (var)
                            && omp_check_optional_argument (var, false)))
                      var = build_fold_indirect_ref (var);
                    else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
                              && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
                             || (!omp_is_allocatable_or_ptr (var)
                                 && !omp_check_optional_argument (var, false)))
                      var = build_fold_addr_expr (var);
                    gimplify_assign (x, var, &ilist);
                  }
              }

            s = NULL_TREE;
            if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
              {
                gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
                s = TREE_TYPE (ovar);
                if (TREE_CODE (s) == REFERENCE_TYPE
                    || omp_check_optional_argument (ovar, false))
                  s = TREE_TYPE (s);
                s = TYPE_SIZE_UNIT (s);
              }
            else
              s = OMP_CLAUSE_SIZE (c);
            if (s == NULL_TREE)
              s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
            s = fold_convert (size_type_node, s);
            purpose = size_int (map_idx++);
            CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
            if (TREE_CODE (s) != INTEGER_CST)
              TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;

            unsigned HOST_WIDE_INT tkind, tkind_zero;
            switch (OMP_CLAUSE_CODE (c))
              {
              case OMP_CLAUSE_MAP:
                tkind = OMP_CLAUSE_MAP_KIND (c);
                tkind_zero = tkind;
                if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
                  switch (tkind)
                    {
                    case GOMP_MAP_ALLOC:
                    case GOMP_MAP_IF_PRESENT:
                    case GOMP_MAP_TO:
                    case GOMP_MAP_FROM:
                    case GOMP_MAP_TOFROM:
                    case GOMP_MAP_ALWAYS_TO:
                    case GOMP_MAP_ALWAYS_FROM:
                    case GOMP_MAP_ALWAYS_TOFROM:
                    case GOMP_MAP_RELEASE:
                    case GOMP_MAP_FORCE_TO:
                    case GOMP_MAP_FORCE_FROM:
                    case GOMP_MAP_FORCE_TOFROM:
                    case GOMP_MAP_FORCE_PRESENT:
                      tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
                      break;
                    case GOMP_MAP_DELETE:
                      tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
                    default:
                      break;
                    }
                if (tkind_zero != tkind)
                  {
                    if (integer_zerop (s))
                      tkind = tkind_zero;
                    else if (integer_nonzerop (s))
                      tkind_zero = tkind;
                  }
                if (tkind_zero == tkind
                    && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c)
                    && (((tkind & GOMP_MAP_FLAG_SPECIAL_BITS)
                         & ~GOMP_MAP_IMPLICIT)
                        == 0))
                  {
                    /* If this is an implicit map and the GOMP_MAP_IMPLICIT
                       bits do not conflict with other special bit encodings,
                       then turn on the GOMP_MAP_IMPLICIT flag for the runtime
                       to see.  */
                    tkind |= GOMP_MAP_IMPLICIT;
                    tkind_zero = tkind;
                  }
                break;
              case OMP_CLAUSE_FIRSTPRIVATE:
                gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
                tkind = GOMP_MAP_TO;
                tkind_zero = tkind;
                break;
              case OMP_CLAUSE_TO:
                tkind = GOMP_MAP_TO;
                tkind_zero = tkind;
                break;
              case OMP_CLAUSE_FROM:
                tkind = GOMP_MAP_FROM;
                tkind_zero = tkind;
                break;
              default:
                gcc_unreachable ();
              }
            gcc_checking_assert (tkind
                                 < (HOST_WIDE_INT_C (1U) << talign_shift));
            gcc_checking_assert (tkind_zero
                                 < (HOST_WIDE_INT_C (1U) << talign_shift));
            talign = ceil_log2 (talign);
            tkind |= talign << talign_shift;
            tkind_zero |= talign << talign_shift;
            gcc_checking_assert (tkind
                                 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
            gcc_checking_assert (tkind_zero
                                 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
            if (tkind == tkind_zero)
              x = build_int_cstu (tkind_type, tkind);
            else
              {
                TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
                x = build3 (COND_EXPR, tkind_type,
                            fold_build2 (EQ_EXPR, boolean_type_node,
                                         unshare_expr (s), size_zero_node),
                            build_int_cstu (tkind_type, tkind_zero),
                            build_int_cstu (tkind_type, tkind));
              }
            CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
            if (nc && nc != c)
              c = nc;
            break;
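
          /* Illustrative arithmetic, not from the original sources: with
             TALIGN_SHIFT == 8, mapping a 16-byte-aligned object with kind
             GOMP_MAP_TOFROM (0x3) packs as

               talign = ceil_log2 (16) = 4
               tkind  = 0x3 | (4 << 8) = 0x403

             so the low byte of each .omp_data_kinds element carries the
             map kind and the high byte the log2 of the alignment.  */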
          case OMP_CLAUSE_FIRSTPRIVATE:
          omp_has_device_addr_descr:
            if (is_gimple_omp_oacc (ctx->stmt))
              goto oacc_firstprivate_map;
            ovar = OMP_CLAUSE_DECL (c);
            if (omp_privatize_by_reference (ovar))
              talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
            else
              talign = DECL_ALIGN_UNIT (ovar);
            var = lookup_decl_in_outer_ctx (ovar, ctx);
            x = build_sender_ref (ovar, ctx);
            tkind = GOMP_MAP_FIRSTPRIVATE;
            type = TREE_TYPE (ovar);
            if (omp_privatize_by_reference (ovar))
              type = TREE_TYPE (type);
            if ((INTEGRAL_TYPE_P (type)
                 && TYPE_PRECISION (type) <= POINTER_SIZE)
                || TREE_CODE (type) == POINTER_TYPE)
              {
                tkind = GOMP_MAP_FIRSTPRIVATE_INT;
                tree t = var;
                if (omp_privatize_by_reference (var))
                  t = build_simple_mem_ref (var);
                else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
                  suppress_warning (var);
                if (TREE_CODE (type) != POINTER_TYPE)
                  t = fold_convert (pointer_sized_int_node, t);
                t = fold_convert (TREE_TYPE (x), t);
                gimplify_assign (x, t, &ilist);
              }
            else if (omp_privatize_by_reference (var))
              gimplify_assign (x, var, &ilist);
            else if (is_gimple_reg (var))
              {
                tree avar = create_tmp_var (TREE_TYPE (var));
                mark_addressable (avar);
                if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
                  suppress_warning (var);
                gimplify_assign (avar, var, &ilist);
                avar = build_fold_addr_expr (avar);
                gimplify_assign (x, avar, &ilist);
              }
            else
              {
                var = build_fold_addr_expr (var);
                gimplify_assign (x, var, &ilist);
              }
            if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
              s = size_int (0);
            else if (omp_privatize_by_reference (ovar))
              s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
            else
              s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
            s = fold_convert (size_type_node, s);
            purpose = size_int (map_idx++);
            CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
            if (TREE_CODE (s) != INTEGER_CST)
              TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;

            gcc_checking_assert (tkind
                                 < (HOST_WIDE_INT_C (1U) << talign_shift));
            talign = ceil_log2 (talign);
            tkind |= talign << talign_shift;
            gcc_checking_assert (tkind
                                 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
            CONSTRUCTOR_APPEND_ELT (vkind, purpose,
                                    build_int_cstu (tkind_type, tkind));
            /* Fortran array descriptors: firstprivate of data + attach.  */
            if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
                && lang_hooks.decls.omp_array_data (ovar, true))
              {
                tree not_null_lb, null_lb, after_lb;
                tree var1, var2, size1, size2;
                tree present = omp_check_optional_argument (ovar, true);
                if (present)
                  {
                    location_t clause_loc = OMP_CLAUSE_LOCATION (c);
                    not_null_lb = create_artificial_label (clause_loc);
                    null_lb = create_artificial_label (clause_loc);
                    after_lb = create_artificial_label (clause_loc);
                    gimple_seq seq = NULL;
                    present = force_gimple_operand (present, &seq, true,
                                                    NULL_TREE);
                    gimple_seq_add_seq (&ilist, seq);
                    gimple_seq_add_stmt (&ilist,
                                         gimple_build_cond_from_tree (present,
                                           not_null_lb, null_lb));
                    gimple_seq_add_stmt (&ilist,
                                         gimple_build_label (not_null_lb));
                  }
                var1 = lang_hooks.decls.omp_array_data (var, false);
                size1 = lang_hooks.decls.omp_array_size (var, &ilist);
                var2 = build_fold_addr_expr (x);
                if (!POINTER_TYPE_P (TREE_TYPE (var)))
                  var = build_fold_addr_expr (var);
                size2 = fold_build2 (POINTER_DIFF_EXPR, ssizetype,
                                     build_fold_addr_expr (var1), var);
                size2 = fold_convert (sizetype, size2);
                if (present)
                  {
                    tree tmp = create_tmp_var (TREE_TYPE (var1));
                    gimplify_assign (tmp, var1, &ilist);
                    var1 = tmp;
                    tmp = create_tmp_var (TREE_TYPE (var2));
                    gimplify_assign (tmp, var2, &ilist);
                    var2 = tmp;
                    tmp = create_tmp_var (TREE_TYPE (size1));
                    gimplify_assign (tmp, size1, &ilist);
                    size1 = tmp;
                    tmp = create_tmp_var (TREE_TYPE (size2));
                    gimplify_assign (tmp, size2, &ilist);
                    size2 = tmp;
                    gimple_seq_add_stmt (&ilist, gimple_build_goto (after_lb));
                    gimple_seq_add_stmt (&ilist, gimple_build_label (null_lb));
                    gimplify_assign (var1, null_pointer_node, &ilist);
                    gimplify_assign (var2, null_pointer_node, &ilist);
                    gimplify_assign (size1, size_zero_node, &ilist);
                    gimplify_assign (size2, size_zero_node, &ilist);
                    gimple_seq_add_stmt (&ilist, gimple_build_label (after_lb));
                  }
                x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
                gimplify_assign (x, var1, &ilist);
                tkind = GOMP_MAP_FIRSTPRIVATE;
                talign = DECL_ALIGN_UNIT (ovar);
                talign = ceil_log2 (talign);
                tkind |= talign << talign_shift;
                gcc_checking_assert (tkind
                                     <= tree_to_uhwi (
                                          TYPE_MAX_VALUE (tkind_type)));
                purpose = size_int (map_idx++);
                CONSTRUCTOR_APPEND_ELT (vsize, purpose, size1);
                if (TREE_CODE (size1) != INTEGER_CST)
                  TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
                CONSTRUCTOR_APPEND_ELT (vkind, purpose,
                                        build_int_cstu (tkind_type, tkind));
                x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
                gimplify_assign (x, var2, &ilist);
                tkind = GOMP_MAP_ATTACH;
                purpose = size_int (map_idx++);
                CONSTRUCTOR_APPEND_ELT (vsize, purpose, size2);
                CONSTRUCTOR_APPEND_ELT (vkind, purpose,
                                        build_int_cstu (tkind_type, tkind));
              }
            break;
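
          /* Illustrative note, not from the original sources: the
             descriptor handling above implements 'firstprivate of data +
             attach' -- an extra GOMP_MAP_FIRSTPRIVATE slot carrying the
             array data (SIZE1 being its size in bytes) plus a
             GOMP_MAP_ATTACH slot whose size (SIZE2) is the byte offset of
             the data pointer within the descriptor, letting the
             device-side descriptor copy be pointed at the device data.  */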
          case OMP_CLAUSE_USE_DEVICE_PTR:
          case OMP_CLAUSE_USE_DEVICE_ADDR:
          case OMP_CLAUSE_HAS_DEVICE_ADDR:
          case OMP_CLAUSE_IS_DEVICE_PTR:
            ovar = OMP_CLAUSE_DECL (c);
            if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
              {
                if (lang_hooks.decls.omp_array_data (ovar, true))
                  goto omp_has_device_addr_descr;
                while (TREE_CODE (ovar) == INDIRECT_REF
                       || TREE_CODE (ovar) == ARRAY_REF)
                  ovar = TREE_OPERAND (ovar, 0);
              }
            var = lookup_decl_in_outer_ctx (ovar, ctx);

            if (lang_hooks.decls.omp_array_data (ovar, true))
              {
                tkind = ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
                          && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
                         ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
                x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
              }
            else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
                     && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
              {
                tkind = GOMP_MAP_USE_DEVICE_PTR;
                x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
              }
            else
              {
                tkind = GOMP_MAP_FIRSTPRIVATE_INT;
                x = build_sender_ref (ovar, ctx);
              }

            if (is_gimple_omp_oacc (ctx->stmt))
              {
                gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);

                if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
                  tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
              }

            type = TREE_TYPE (ovar);
            if (lang_hooks.decls.omp_array_data (ovar, true))
              var = lang_hooks.decls.omp_array_data (var, false);
            else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
                       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
                      && !omp_privatize_by_reference (ovar)
                      && !omp_is_allocatable_or_ptr (ovar))
                     || TREE_CODE (type) == ARRAY_TYPE)
              var = build_fold_addr_expr (var);
            else
              {
                if (omp_privatize_by_reference (ovar)
                    || omp_check_optional_argument (ovar, false)
                    || omp_is_allocatable_or_ptr (ovar))
                  {
                    type = TREE_TYPE (type);
                    if (POINTER_TYPE_P (type)
                        && TREE_CODE (type) != ARRAY_TYPE
                        && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
                             && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
                             && !omp_is_allocatable_or_ptr (ovar))
                            || (omp_privatize_by_reference (ovar)
                                && omp_is_allocatable_or_ptr (ovar))))
                      var = build_simple_mem_ref (var);
                    var = fold_convert (TREE_TYPE (x), var);
                  }
              }
            tree present;
            present = omp_check_optional_argument (ovar, true);
            if (present)
              {
                tree null_label = create_artificial_label (UNKNOWN_LOCATION);
                tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
                tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
                tree new_x = unshare_expr (x);
                gimplify_expr (&present, &ilist, NULL, is_gimple_val,
                               fb_rvalue);
                gcond *cond = gimple_build_cond_from_tree (present,
                                                           notnull_label,
                                                           null_label);
                gimple_seq_add_stmt (&ilist, cond);
                gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
                gimplify_assign (new_x, null_pointer_node, &ilist);
                gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
                gimple_seq_add_stmt (&ilist,
                                     gimple_build_label (notnull_label));
                gimplify_assign (x, var, &ilist);
                gimple_seq_add_stmt (&ilist,
                                     gimple_build_label (opt_arg_label));
              }
            else
              gimplify_assign (x, var, &ilist);
            s = size_int (0);
            purpose = size_int (map_idx++);
            CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
            gcc_checking_assert (tkind
                                 < (HOST_WIDE_INT_C (1U) << talign_shift));
            gcc_checking_assert (tkind
                                 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
            CONSTRUCTOR_APPEND_ELT (vkind, purpose,
                                    build_int_cstu (tkind_type, tkind));
            break;
          }
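
      /* Illustrative sketch, not from the original sources: for

           #pragma omp target data map(tofrom: p[0:n]) use_device_ptr(p)

         the slot emitted above has kind GOMP_MAP_USE_DEVICE_PTR and size
         0; the runtime rewrites the recorded host pointer to its device
         counterpart, which the region then reads back through the
         temporary installed as DECL_VALUE_EXPR of 'p' in the first
         scan.  */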
      gcc_assert (map_idx == map_cnt);

      DECL_INITIAL (TREE_VEC_ELT (t, 1))
        = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
      DECL_INITIAL (TREE_VEC_ELT (t, 2))
        = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
      for (int i = 1; i <= 2; i++)
        if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
          {
            gimple_seq initlist = NULL;
            force_gimple_operand (build1 (DECL_EXPR, void_type_node,
                                          TREE_VEC_ELT (t, i)),
                                  &initlist, true, NULL_TREE);
            gimple_seq_add_seq (&ilist, initlist);

            tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
            gimple_seq_add_stmt (&olist,
                                 gimple_build_assign (TREE_VEC_ELT (t, i),
                                                      clobber));
          }
        else if (omp_maybe_offloaded_ctx (ctx->outer))
          {
            tree id = get_identifier ("omp declare target");
            tree decl = TREE_VEC_ELT (t, i);
            DECL_ATTRIBUTES (decl)
              = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
            varpool_node *node = varpool_node::get (decl);
            if (node)
              {
                node->offloadable = 1;
                if (ENABLE_OFFLOADING)
                  {
                    g->have_offload = true;
                    vec_safe_push (offload_vars, t);
                  }
              }
          }

      tree clobber = build_clobber (ctx->record_type);
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
                                                        clobber));
    }
  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  new_body = NULL;

  if (offloaded
      && ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
                           gimple_build_assign (ctx->receiver_decl, t));
    }
  gimple_seq_add_seq (&new_body, fplist);

  if (offloaded || data_region)
    {
      tree prev = NULL_TREE;
      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
            tree var, x;
          default:
            break;
          case OMP_CLAUSE_FIRSTPRIVATE:
          omp_firstprivatize_data_region:
            if (is_gimple_omp_oacc (ctx->stmt))
              break;
            var = OMP_CLAUSE_DECL (c);
            if (omp_privatize_by_reference (var)
                || is_gimple_reg_type (TREE_TYPE (var)))
              {
                tree new_var = lookup_decl (var, ctx);
                tree type;
                type = TREE_TYPE (var);
                if (omp_privatize_by_reference (var))
                  type = TREE_TYPE (type);
                if ((INTEGRAL_TYPE_P (type)
                     && TYPE_PRECISION (type) <= POINTER_SIZE)
                    || TREE_CODE (type) == POINTER_TYPE)
                  {
                    x = build_receiver_ref (var, false, ctx);
                    if (TREE_CODE (type) != POINTER_TYPE)
                      x = fold_convert (pointer_sized_int_node, x);
                    x = fold_convert (type, x);
                    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
                                   fb_rvalue);
                    if (omp_privatize_by_reference (var))
                      {
                        tree v = create_tmp_var_raw (type, get_name (var));
                        gimple_add_tmp_var (v);
                        TREE_ADDRESSABLE (v) = 1;
                        gimple_seq_add_stmt (&new_body,
                                             gimple_build_assign (v, x));
                        x = build_fold_addr_expr (v);
                      }
                    gimple_seq_add_stmt (&new_body,
                                         gimple_build_assign (new_var, x));
                  }
                else
                  {
                    bool by_ref = !omp_privatize_by_reference (var);
                    x = build_receiver_ref (var, by_ref, ctx);
                    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
                                   fb_rvalue);
                    gimple_seq_add_stmt (&new_body,
                                         gimple_build_assign (new_var, x));
                  }
              }
            else if (is_variable_sized (var))
              {
                tree pvar = DECL_VALUE_EXPR (var);
                gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
                pvar = TREE_OPERAND (pvar, 0);
                gcc_assert (DECL_P (pvar));
                tree new_var = lookup_decl (pvar, ctx);
                x = build_receiver_ref (var, false, ctx);
                gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
                gimple_seq_add_stmt (&new_body,
                                     gimple_build_assign (new_var, x));
              }
            break;
          case OMP_CLAUSE_PRIVATE:
            if (is_gimple_omp_oacc (ctx->stmt))
              break;
            var = OMP_CLAUSE_DECL (c);
            if (omp_privatize_by_reference (var))
              {
                location_t clause_loc = OMP_CLAUSE_LOCATION (c);
                tree new_var = lookup_decl (var, ctx);
                x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
                if (TREE_CONSTANT (x))
                  {
                    x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
                                            get_name (var));
                    gimple_add_tmp_var (x);
                    TREE_ADDRESSABLE (x) = 1;
                    x = build_fold_addr_expr_loc (clause_loc, x);
                  }
                else
                  break;

                x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
                gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
                gimple_seq_add_stmt (&new_body,
                                     gimple_build_assign (new_var, x));
              }
            break;
          case OMP_CLAUSE_USE_DEVICE_PTR:
          case OMP_CLAUSE_USE_DEVICE_ADDR:
          case OMP_CLAUSE_HAS_DEVICE_ADDR:
          case OMP_CLAUSE_IS_DEVICE_PTR:
            tree new_var;
            gimple_seq assign_body;
            bool is_array_data;
            bool do_optional_check;
            assign_body = NULL;
            do_optional_check = false;
            var = OMP_CLAUSE_DECL (c);
            is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
            if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR && is_array_data)
              goto omp_firstprivatize_data_region;

            if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
                && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
              x = build_sender_ref (is_array_data
                                    ? (splay_tree_key) &DECL_NAME (var)
                                    : (splay_tree_key) &DECL_UID (var), ctx);
            else
              {
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
                  {
                    while (TREE_CODE (var) == INDIRECT_REF
                           || TREE_CODE (var) == ARRAY_REF)
                      var = TREE_OPERAND (var, 0);
                  }
                x = build_receiver_ref (var, false, ctx);
              }

            if (is_array_data)
              {
                bool is_ref = omp_privatize_by_reference (var);
                do_optional_check = true;
                /* First, we copy the descriptor data from the host; then
                   we update its data to point to the target address.  */
                new_var = lookup_decl (var, ctx);
                new_var = DECL_VALUE_EXPR (new_var);
                tree v = new_var;
                tree v2 = var;
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR
                    || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR)
                  v2 = maybe_lookup_decl_in_outer_ctx (var, ctx);

                if (is_ref)
                  {
                    v2 = build_fold_indirect_ref (v2);
                    v = create_tmp_var_raw (TREE_TYPE (v2), get_name (var));
                    gimple_add_tmp_var (v);
                    TREE_ADDRESSABLE (v) = 1;
                    gimplify_assign (v, v2, &assign_body);
                    tree rhs = build_fold_addr_expr (v);
                    gimple_seq_add_stmt (&assign_body,
                                         gimple_build_assign (new_var, rhs));
                  }
                else
                  gimplify_assign (new_var, v2, &assign_body);

                v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
                gcc_assert (v2);
                gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
                gimple_seq_add_stmt (&assign_body,
                                     gimple_build_assign (v2, x));
              }
            else if (is_variable_sized (var))
              {
                tree pvar = DECL_VALUE_EXPR (var);
                gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
                pvar = TREE_OPERAND (pvar, 0);
                gcc_assert (DECL_P (pvar));
                new_var = lookup_decl (pvar, ctx);
                gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
                gimple_seq_add_stmt (&assign_body,
                                     gimple_build_assign (new_var, x));
              }
            else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
                       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
                      && !omp_privatize_by_reference (var)
                      && !omp_is_allocatable_or_ptr (var))
                     || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
              {
                new_var = lookup_decl (var, ctx);
                new_var = DECL_VALUE_EXPR (new_var);
                gcc_assert (TREE_CODE (new_var) == MEM_REF);
                new_var = TREE_OPERAND (new_var, 0);
                gcc_assert (DECL_P (new_var));
                gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
                gimple_seq_add_stmt (&assign_body,
                                     gimple_build_assign (new_var, x));
              }
            else
              {
                tree type = TREE_TYPE (var);
                new_var = lookup_decl (var, ctx);
                if (omp_privatize_by_reference (var))
                  {
                    type = TREE_TYPE (type);
                    if (POINTER_TYPE_P (type)
                        && TREE_CODE (type) != ARRAY_TYPE
                        && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
                             && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
                            || (omp_privatize_by_reference (var)
                                && omp_is_allocatable_or_ptr (var))))
                      {
                        tree v = create_tmp_var_raw (type, get_name (var));
                        gimple_add_tmp_var (v);
                        TREE_ADDRESSABLE (v) = 1;
                        x = fold_convert (type, x);
                        gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
                                       fb_rvalue);
                        gimple_seq_add_stmt (&assign_body,
                                             gimple_build_assign (v, x));
                        x = build_fold_addr_expr (v);
                        do_optional_check = true;
                      }
                  }
                new_var = DECL_VALUE_EXPR (new_var);
                x = fold_convert (TREE_TYPE (new_var), x);
                gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
                gimple_seq_add_stmt (&assign_body,
                                     gimple_build_assign (new_var, x));
              }
            tree present;
            present = ((do_optional_check
                        && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
                       ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
                       : NULL_TREE);
            if (present)
              {
                tree null_label = create_artificial_label (UNKNOWN_LOCATION);
                tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
                tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
                glabel *null_glabel = gimple_build_label (null_label);
                glabel *notnull_glabel = gimple_build_label (notnull_label);
                ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
                gimplify_expr (&x, &new_body, NULL, is_gimple_val,
                               fb_rvalue);
                gimplify_expr (&present, &new_body, NULL, is_gimple_val,
                               fb_rvalue);
                gcond *cond = gimple_build_cond_from_tree (present,
                                                           notnull_label,
                                                           null_label);
                gimple_seq_add_stmt (&new_body, cond);
                gimple_seq_add_stmt (&new_body, null_glabel);
                gimplify_assign (new_var, null_pointer_node, &new_body);
                gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
                gimple_seq_add_stmt (&new_body, notnull_glabel);
                gimple_seq_add_seq (&new_body, assign_body);
                gimple_seq_add_stmt (&new_body,
                                     gimple_build_label (opt_arg_label));
              }
            else
              gimple_seq_add_seq (&new_body, assign_body);
            break;
          }
      /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
         so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
         are already handled.  Similarly OMP_CLAUSE_PRIVATE for VLAs
         or references to VLAs.  */
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
            tree var;
          default:
            break;
          case OMP_CLAUSE_MAP:
            if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
                || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
              {
                location_t clause_loc = OMP_CLAUSE_LOCATION (c);
                poly_int64 offset = 0;
                gcc_assert (prev);
                var = OMP_CLAUSE_DECL (c);
                if (DECL_P (var)
                    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
                    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
                                                                      ctx))
                    && varpool_node::get_create (var)->offloadable)
                  break;
                if (TREE_CODE (var) == INDIRECT_REF
                    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
                  var = TREE_OPERAND (var, 0);
                if (TREE_CODE (var) == COMPONENT_REF)
                  {
                    var = get_addr_base_and_unit_offset (var, &offset);
                    gcc_assert (var != NULL_TREE && DECL_P (var));
                  }
                else if (DECL_SIZE (var)
                         && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
                  {
                    tree var2 = DECL_VALUE_EXPR (var);
                    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
                    var2 = TREE_OPERAND (var2, 0);
                    gcc_assert (DECL_P (var2));
                    var = var2;
                  }
                tree new_var = lookup_decl (var, ctx), x;
                tree type = TREE_TYPE (new_var);
                bool is_ref;
                if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
                    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
                        == COMPONENT_REF))
                  {
                    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
                    is_ref = true;
                    new_var = build2 (MEM_REF, type,
                                      build_fold_addr_expr (new_var),
                                      build_int_cst (build_pointer_type (type),
                                                     offset));
                  }
                else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
                  {
                    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
                    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
                    new_var = build2 (MEM_REF, type,
                                      build_fold_addr_expr (new_var),
                                      build_int_cst (build_pointer_type (type),
                                                     offset));
                  }
                else
                  is_ref = omp_privatize_by_reference (var);
                if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
                  is_ref = false;
                bool ref_to_array = false;
                bool ref_to_ptr = false;
                if (is_ref)
                  {
                    type = TREE_TYPE (type);
                    if (TREE_CODE (type) == ARRAY_TYPE)
                      {
                        type = build_pointer_type (type);
                        ref_to_array = true;
                      }
                  }
                else if (TREE_CODE (type) == ARRAY_TYPE)
                  {
                    tree decl2 = DECL_VALUE_EXPR (new_var);
                    gcc_assert (TREE_CODE (decl2) == MEM_REF);
                    decl2 = TREE_OPERAND (decl2, 0);
                    gcc_assert (DECL_P (decl2));
                    new_var = decl2;
                    type = TREE_TYPE (new_var);
                  }
                else if (TREE_CODE (type) == REFERENCE_TYPE
                         && TREE_CODE (TREE_TYPE (type)) == POINTER_TYPE)
                  {
                    type = TREE_TYPE (type);
                    ref_to_ptr = true;
                  }
                x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
                x = fold_convert_loc (clause_loc, type, x);
                if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
                  {
                    tree bias = OMP_CLAUSE_SIZE (c);
                    if (DECL_P (bias))
                      bias = lookup_decl (bias, ctx);
                    bias = fold_convert_loc (clause_loc, sizetype, bias);
                    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
                                            bias);
                    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
                                         TREE_TYPE (x), x, bias);
                  }
                if (ref_to_array)
                  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
                gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
                if ((is_ref && !ref_to_array)
                    || ref_to_ptr)
                  {
                    tree t = create_tmp_var_raw (type, get_name (var));
                    gimple_add_tmp_var (t);
                    TREE_ADDRESSABLE (t) = 1;
                    gimple_seq_add_stmt (&new_body,
                                         gimple_build_assign (t, x));
                    x = build_fold_addr_expr_loc (clause_loc, t);
                  }
                gimple_seq_add_stmt (&new_body,
                                     gimple_build_assign (new_var, x));
                prev = NULL_TREE;
              }
            else if (OMP_CLAUSE_CHAIN (c)
                     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
                        == OMP_CLAUSE_MAP
                     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
                         == GOMP_MAP_FIRSTPRIVATE_POINTER
                         || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
                             == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
              prev = c;
            break;
          case OMP_CLAUSE_PRIVATE:
            var = OMP_CLAUSE_DECL (c);
            if (is_variable_sized (var))
              {
                location_t clause_loc = OMP_CLAUSE_LOCATION (c);
                tree new_var = lookup_decl (var, ctx);
                tree pvar = DECL_VALUE_EXPR (var);
                gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
                pvar = TREE_OPERAND (pvar, 0);
                gcc_assert (DECL_P (pvar));
                tree new_pvar = lookup_decl (pvar, ctx);
                tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
                tree al = size_int (DECL_ALIGN (var));
                tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
                x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
                x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
                gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
                gimple_seq_add_stmt (&new_body,
                                     gimple_build_assign (new_pvar, x));
              }
            else if (omp_privatize_by_reference (var)
                     && !is_gimple_omp_oacc (ctx->stmt))
              {
                location_t clause_loc = OMP_CLAUSE_LOCATION (c);
                tree new_var = lookup_decl (var, ctx);
                tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
                if (TREE_CONSTANT (x))
                  break;
                else
                  {
                    tree atmp
                      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
                    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
                    tree al = size_int (TYPE_ALIGN (rtype));
                    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
                  }

                x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
                gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
                gimple_seq_add_stmt (&new_body,
                                     gimple_build_assign (new_var, x));
              }
            break;
          }
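
      /* Illustrative note, not from the original sources: a variable
         sized private such as 'int buf[n]' has no storage inside the
         offloaded function, so the OMP_CLAUSE_PRIVATE handling above
         allocates it on entry with __builtin_alloca_with_align, sized
         from TYPE_SIZE_UNIT of the remapped type and aligned per
         DECL_ALIGN of the original decl, and stores the result into the
         pointer that the VLA's DECL_VALUE_EXPR dereferences.  */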
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      if (offloaded && is_gimple_omp_oacc (ctx->stmt))
        {
          /* If there are reductions on the offloaded region itself, treat
             them as a dummy GANG loop.  */
          tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);

          gcall *private_marker = lower_oacc_private_marker (ctx);

          if (private_marker)
            gimple_call_set_arg (private_marker, 2, level);

          lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
                                 false, NULL, private_marker, NULL, &fork_seq,
                                 &join_seq, ctx);
        }

      gimple_seq_add_seq (&new_body, fork_seq);
      gimple_seq_add_seq (&new_body, tgt_body);
      gimple_seq_add_seq (&new_body, join_seq);

      if (offloaded)
        {
          new_body = maybe_catch_exception (new_body);
          gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
        }
      gimple_omp_set_body (stmt, new_body);
    }

  bind = gimple_build_bind (NULL, NULL,
                            tgt_bind ? gimple_bind_block (tgt_bind)
                                     : NULL_TREE);
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
/* Expand code for an OpenMP teams directive.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
                                    OMP_CLAUSE_NUM_TEAMS);
  tree num_teams_lower = NULL_TREE;
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams_lower = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams);
      if (num_teams_lower)
        {
          num_teams_lower = fold_convert (unsigned_type_node, num_teams_lower);
          gimplify_expr (&num_teams_lower, &bind_body, NULL, is_gimple_val,
                         fb_rvalue);
        }
      num_teams = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  if (num_teams_lower == NULL_TREE)
    num_teams_lower = num_teams;
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
                                       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
                     fb_rvalue);
    }
  location_t loc = gimple_location (teams_stmt);
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4);
  tree rettype = TREE_TYPE (TREE_TYPE (decl));
  tree first = create_tmp_var (rettype);
  gimple_seq_add_stmt (&bind_body,
                       gimple_build_assign (first, build_one_cst (rettype)));
  tree llabel = create_artificial_label (loc);
  gimple_seq_add_stmt (&bind_body, gimple_build_label (llabel));
  gimple *call
    = gimple_build_call (decl, 4, num_teams_lower, num_teams, thread_limit,
                         first);
  gimple_set_location (call, loc);
  tree temp = create_tmp_var (rettype);
  gimple_call_set_lhs (call, temp);
  gimple_seq_add_stmt (&bind_body, call);

  tree tlabel = create_artificial_label (loc);
  tree flabel = create_artificial_label (loc);
  gimple *cond = gimple_build_cond (NE_EXPR, temp, build_zero_cst (rettype),
                                    tlabel, flabel);
  gimple_seq_add_stmt (&bind_body, cond);
  gimple_seq_add_stmt (&bind_body, gimple_build_label (tlabel));
  gimple_seq_add_stmt (&bind_body,
                       gimple_build_assign (first, build_zero_cst (rettype)));

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
                           &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
                           NULL, ctx);
  gimple_seq_add_stmt (&bind_body, teams_stmt);

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_seq_add_stmt (&bind_body, gimple_build_goto (llabel));
  gimple_seq_add_stmt (&bind_body, gimple_build_label (flabel));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with make_addressable_vars set.  */
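/* (DECL_VALUE_EXPRs are installed, for instance, when a shared variable
   is remapped to a field of the .omp_data_i receiver record; statements
   still referring to the original decl must then be regimplified so the
   indirection gets expanded.)  */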
static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
      && data == NULL
      && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (make_addressable_vars
      && DECL_P (t)
      && bitmap_bit_p (make_addressable_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}

/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  omp_context *ctx;
  vec<tree> *decls;
};

/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and temporarily adjust their
   DECL_VALUE_EXPRs if needed.  */
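/* (These dummy vars stand in, e.g., for non-static data member accesses
   such as this->field in a method's OMP region; their DECL_VALUE_EXPR is
   based on an underlying variable that may itself have been remapped in
   CTX, in which case the value expr is temporarily rewritten in terms of
   the remapped copy.)  */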
static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}

/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */
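/* (DECLS collects <saved DECL_VALUE_EXPR, var> pairs pushed by the
   callback above, so the temporary adjustments can be undone by the pop
   loop below once regimplification is done.)  */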
static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}

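/* Lower the statement at *GSI_P within context CTX (NULL when outside
   any OMP context): recurse into nested statement bodies, dispatch OMP
   constructs to their specific lowering routines, and regimplify
   operands that variable remapping has invalidated.  */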
static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (make_addressable_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || make_addressable_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_ASSUME:
      lower_omp (gimple_assume_body_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      if (ctx && is_gimple_omp_oacc (ctx->stmt))
	{
	  tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
	  oacc_privatization_scan_decl_chain (ctx, vars);
	}
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCOPE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scope (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || make_addressable_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
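    /* For cancellable regions, the GIMPLE_CALL case below rewrites
       barrier/cancel/cancellation point calls so that a true result
       branches to the region's cancel label, roughly:

	    lhs = GOMP_barrier_cancel ();
	    if (lhs != 0) goto cancel_label; else goto fallthru_label;
	  fallthru_label:

       (an illustrative sketch of the sequence built below).  */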
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      goto regimplify;
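    /* For variables listed in a lastprivate (conditional:) clause, each
       store is followed below by a copy of the construct's _condtemp_
       iteration counter into the tracking temporary, so the store from
       the highest-numbered iteration can be identified later.  */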
    case GIMPLE_ASSIGN:
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs, 0)))
		 == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen1)
		  {
		    up = up->outer;
		    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
		      up = up->outer;
		  }
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || make_addressable_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers; this should happen only if we have
	     "privatized" local addressable variables in SIMD regions.
	     The clobber isn't needed in that case, and gimplifying the
	     address of the ARRAY_REF into a pointer and creating a
	     MEM_REF-based clobber would create worse code than we get
	     with the clobber dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}

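/* Lower all OMP directives in the gimple sequence *BODY within context
   CTX, then fold statements inside offloading or taskreg regions that
   gimplification deliberately left unfolded.  */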
static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}

/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (make_addressable_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (make_addressable_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (make_addressable_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If the current function is a method, remove the artificial dummy
     VAR_DECLs created for non-static data member privatization: they
     aren't needed for debuginfo or anything else, have already been
     replaced everywhere in the IL, and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));

  for (auto task_stmt : task_cpyfns)
    finalize_task_copyfn (task_stmt);
  task_cpyfns.release ();
  return 0;
}

namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return execute_lower_omp ();
  }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}

/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */
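/* For example (an illustrative sketch, not taken from a testcase), given

     #pragma omp parallel
     {
       goto out;
     }
     out:;

   the goto's recorded context is the GIMPLE_OMP_PARALLEL statement while
   the label's context is NULL, so diagnose_sb_0 below reports an invalid
   branch from the OpenMP structured block; a jump in the opposite
   direction is diagnosed as an invalid entry.  */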
static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */
static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char *kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with the correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    /* Otherwise, be vague and lazy, but efficient.  */
    error ("invalid branch to/from %s structured block", kind);

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}

/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Pass 2: Check each branch and see if its context differs from the
   context of the destination label.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

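/* Entry point for the structured block diagnostics: walk the function
   body once to record each label's enclosing OMP context
   (diagnose_sb_1), then once more to compare every branch against the
   recorded context of its destination label (diagnose_sb_2).  */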
static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}

namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  unsigned int execute (function *) final override
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}

#include "gt-omp-low.h"