/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "stringpool.h"
#include "attribs.h"
#include "omp-offload.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new function, to be
   invoked by the thread library, or offloaded.  */
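
/* As a motivating example (illustrative only, not the literal output):
   given

     #pragma omp parallel shared (x)
       x++;

   in a function `foo', the region body is eventually outlined into an
   artificial child function along the lines of

     void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
     {
       (*.omp_data_i->x)++;
     }

   with the shared data communicated through the record that .omp_data_i
   points to, and the libgomp entry point GOMP_parallel invoking the
   child function from each thread of the team.  Whether X is
   communicated by value or by pointer is decided by
   use_pointer_for_field below.  */
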
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is a nested scan context with an inclusive clause.  */
  bool scan_inclusive;

  /* True if there is a nested scan context with an exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is an order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is a bind clause on the construct (i.e. a loop
     construct).  */
  bool loop_p;

  /* Only used for omp target contexts.  True if a teams construct is
     strictly nested in it.  */
  bool teams_nested_p;

  /* Only used for omp target contexts.  True if an OpenMP construct other
     than teams is strictly nested in it.  */
  bool nonteams_nested_p;

  /* Candidates for adjusting OpenACC privatization level.  */
  vec<tree> oacc_privatization_candidates;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static bitmap global_nonaddressable_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
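
/* Illustrative use of WALK_SUBSTMTS (a sketch, not a definition from this
   file): it expands to case labels inside the switch of a walk_gimple_seq
   callback, whose standard signature supplies the HANDLED_OPS_P that the
   macro assigns to:

     static tree
     example_callback (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
                       struct walk_stmt_info *wi)
     {
       gimple *stmt = gsi_stmt (*gsi_p);
       switch (gimple_code (stmt))
         {
         WALK_SUBSTMTS;
         default:
           break;
         }
       return NULL_TREE;
     }

   so container statements such as GIMPLE_BIND are descended into rather
   than treated as leaves.  */
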
/* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_parallel_or_serial (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && ((gimple_omp_target_kind (ctx->stmt)
               == GF_OMP_TARGET_KIND_OACC_PARALLEL)
              || (gimple_omp_target_kind (ctx->stmt)
                  == GF_OMP_TARGET_KIND_OACC_SERIAL)));
}

/* Return whether CTX represents an OpenACC 'kernels' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* Return whether CTX represents an OpenACC 'kernels' decomposed part.  */

static bool
is_oacc_kernels_decomposed_part (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && ((gimple_omp_target_kind (ctx->stmt)
               == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
              || (gimple_omp_target_kind (ctx->stmt)
                  == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
              || (gimple_omp_target_kind (ctx->stmt)
                  == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
}

/* Return true if STMT corresponds to an OpenMP target region.  */

static bool
is_omp_target (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
    {
      int kind = gimple_omp_target_kind (stmt);
      return (kind == GF_OMP_TARGET_KIND_REGION
              || kind == GF_OMP_TARGET_KIND_DATA
              || kind == GF_OMP_TARGET_KIND_ENTER_DATA
              || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
    }
  return false;
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
        v = TREE_OPERAND (v, 0);
        continue;
      case PARM_DECL:
        if (DECL_CONTEXT (v) == current_function_decl
            && DECL_ARTIFICIAL (v)
            && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
          return v;
        return NULL_TREE;
      default:
        return NULL_TREE;
      }
}

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
         && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
         && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" forms allow the variable not to have
   been entered; otherwise we assert that it has been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
                         ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
         be passing an address in this case?  Should we simply assert
         this to be false, or should we have a cleanup pass that removes
         these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
        return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
         without analyzing the expression whether or not its location
         is accessible to anyone else.  In the case of nested parallel
         regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
        return true;

      /* Do not use copy-in/copy-out for variables that have their
         address taken.  */
      if (is_global_var (decl))
        {
          /* For file scope vars, track whether we've seen them as
             non-addressable initially and in that case, keep the same
             answer for the duration of the pass, even when they are made
             addressable later on e.g. through reduction expansion.  Global
             variables which weren't addressable before the pass will not
             have their privatized copies address taken.  See PR91216.  */
          if (!TREE_ADDRESSABLE (decl))
            {
              if (!global_nonaddressable_vars)
                global_nonaddressable_vars = BITMAP_ALLOC (NULL);
              bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
            }
          else if (!global_nonaddressable_vars
                   || !bitmap_bit_p (global_nonaddressable_vars,
                                     DECL_UID (decl)))
            return true;
        }
      else if (TREE_ADDRESSABLE (decl))
        return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
         for these.  */
      if (TREE_READONLY (decl)
          || ((TREE_CODE (decl) == RESULT_DECL
               || TREE_CODE (decl) == PARM_DECL)
              && DECL_BY_REFERENCE (decl)))
        return false;

      /* Disallow copy-in/out in nested parallel if
         decl is shared in outer parallel, otherwise
         each thread could store the shared variable
         in its own copy-in location, making the
         variable no longer really shared.  */
      if (shared_ctx->is_nested)
        {
          omp_context *up;

          for (up = shared_ctx->outer; up; up = up->outer)
            if ((is_taskreg_ctx (up)
                 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
                     && is_gimple_omp_offloaded (up->stmt)))
                && maybe_lookup_decl (decl, up))
              break;

          if (up)
            {
              tree c;

              if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
                {
                  for (c = gimple_omp_target_clauses (up->stmt);
                       c; c = OMP_CLAUSE_CHAIN (c))
                    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                        && OMP_CLAUSE_DECL (c) == decl)
                      break;
                }
              else
                for (c = gimple_omp_taskreg_clauses (up->stmt);
                     c; c = OMP_CLAUSE_CHAIN (c))
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                      && OMP_CLAUSE_DECL (c) == decl)
                    break;

              if (c)
                goto maybe_mark_addressable_and_ret;
            }
        }

      /* For tasks avoid using copy-in/out.  As tasks can be
         deferred or executed in a different thread, when GOMP_task
         returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
        {
          tree outer;
        maybe_mark_addressable_and_ret:
          outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
          if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
            {
              /* Taking address of OUTER in lower_send_shared_vars
                 might need regimplification of everything that uses the
                 variable.  */
              if (!task_shared_vars)
                task_shared_vars = BITMAP_ALLOC (NULL);
              bitmap_set_bit (task_shared_vars, DECL_UID (outer));
              TREE_ADDRESSABLE (outer) = 1;
            }
          return true;
        }
    }

  return false;
}

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;

  if (ctx)
    {
      DECL_CHAIN (copy) = ctx->block_vars;
      ctx->block_vars = copy;
    }
  else
    record_vars (copy);

  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable only because a task
     needed to take its address.  Privatized copies made from such a
     var need not have their address taken.  */
  if (TREE_ADDRESSABLE (var)
      && ((task_shared_vars
           && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
          || (global_nonaddressable_vars
              && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;

  return copy;
}

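/* Construct a new automatic decl that is a copy of VAR, reusing VAR's
   name and type.  */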
static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */
/* See also 'gcc/omp-oacc-neuter-broadcast.cc:oacc_build_component_ref'.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
                     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  for (; outer; outer = outer->outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
        continue;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
          && !maybe_lookup_decl (var, outer))
        continue;
      break;
    }

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
            && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
           || ctx->loop_p
           || (code == OMP_CLAUSE_PRIVATE
               && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
         even private vars in its linear etc. clauses.
         Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
         to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
        x = lookup_decl (var, outer);
      else if (outer)
        x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
        x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
        = splay_tree_lookup (outer->field_map,
                             (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
        {
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
            x = var;
          else
            x = lookup_decl (var, outer);
        }
      else
        {
          tree field = (tree) n->value;
          /* If the receiver record type was remapped in the child function,
             remap the field into the new record type.  */
          x = maybe_lookup_field (field, outer);
          if (x != NULL)
            field = x;

          x = build_simple_mem_ref (outer->receiver_decl);
          x = omp_build_component_ref (x, field);
          if (use_pointer_for_field (var, outer))
            x = build_simple_mem_ref (x);
        }
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_privatize_by_reference (var))
    /* This can happen with orphaned constructs.  If VAR is a reference,
       it is possibly shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
        {
          x = DECL_VALUE_EXPR (var);
          tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
          if (o != t)
            x = unshare_and_remap (x, t, o);
          else
            x = unshare_expr (x);
        }
    }

  if (omp_privatize_by_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */

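/* A note on the MASK bits, as inferred from the uses below: bit 1
   installs the field in CTX->FIELD_MAP (the receiver-side record type),
   bit 2 in CTX->SFIELD_MAP (the sender-side record type), bit 4 turns
   an array type into a pointer to a pointer to it, bit 8 keys the splay
   tree by &DECL_UID (VAR), bit 16 keys it by &DECL_NAME (VAR) and uses
   the Fortran array-descriptor type, and bit 32 suppresses stripping
   the reference type from privatized-by-reference vars.  */
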
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
              || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
              || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
              || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & (32 | 3)) == 1
           && omp_privatize_by_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
                      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
        {
          sfield = build_decl (DECL_SOURCE_LOCATION (var),
                               FIELD_DECL, DECL_NAME (var), type);
          DECL_ABSTRACT_ORIGIN (sfield) = var;
          SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
          DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
          TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
          insert_field_into_struct (ctx->srecord_type, sfield);
        }
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
        {
          tree t;

          ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
          ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
          for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
            {
              sfield = build_decl (DECL_SOURCE_LOCATION (t),
                                   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
              DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
              insert_field_into_struct (ctx->srecord_type, sfield);
              splay_tree_insert (ctx->sfield_map,
                                 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
                                 (splay_tree_value) sfield);
            }
        }
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
                                : ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

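/* Create a copy of VAR local to CTX's child function and record the
   mapping from VAR to it in CTX's decl map.  */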
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
        return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
        return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
        return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
                     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;
  delete ctx->allocate_map;

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
                         TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
        {
          tree new_f = copy_node (f);
          DECL_CONTEXT (new_f) = type;
          TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
          DECL_CHAIN (new_f) = new_fields;
          walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
          walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          new_fields = new_f;

          /* Arrange to be able to look up the receiver field
             given the sender field.  */
          splay_tree_insert (ctx->field_map, (splay_tree_key) f,
                             (splay_tree_value) new_f);
        }
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
        && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
            /* omp_default_mem_alloc is 1.  */
            || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
            || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
      {
        if (ctx->allocate_map == NULL)
          ctx->allocate_map = new hash_map<tree, tree>;
        tree val = integer_zero_node;
        if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
          val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
        if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
          val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
        ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
      }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
          decl = OMP_CLAUSE_DECL (c);
          if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
            goto do_private;
          else if (!is_variable_sized (decl))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_SHARED:
          decl = OMP_CLAUSE_DECL (c);
          if (ctx->allocate_map && ctx->allocate_map->get (decl))
            ctx->allocate_map->remove (decl);
          /* Ignore shared directives in teams construct inside of
             target construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
              && !is_host_teams_ctx (ctx))
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
              if (is_global_var (odecl))
                break;
              insert_decl_map (&ctx->cb, decl, odecl);
              break;
            }
          gcc_assert (is_taskreg_ctx (ctx));
          gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
                      || !is_variable_sized (decl));
          /* Global variables don't need to be copied,
             the receiver side will use them directly.  */
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              use_pointer_for_field (decl, ctx);
              break;
            }
          by_ref = use_pointer_for_field (decl, NULL);
          if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
              || TREE_ADDRESSABLE (decl)
              || by_ref
              || omp_privatize_by_reference (decl))
            {
              by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 3, ctx);
              install_var_local (decl, ctx);
              break;
            }
          /* We don't need to copy const scalar vars back.  */
          OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
          goto do_private;

        case OMP_CLAUSE_REDUCTION:
          /* Collect 'reduction' clauses on OpenACC compute construct.  */
          if (is_gimple_omp_oacc (ctx->stmt)
              && is_gimple_omp_offloaded (ctx->stmt))
            {
              /* No 'reduction' clauses on OpenACC 'kernels'.  */
              gcc_checking_assert (!is_oacc_kernels (ctx));
              /* Likewise, on OpenACC 'kernels' decomposed parts.  */
              gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

              ctx->local_reduction_clauses
                = tree_cons (NULL, c, ctx->local_reduction_clauses);
            }
          /* FALLTHRU */

        case OMP_CLAUSE_IN_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (ctx->allocate_map
              && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
                   && (OMP_CLAUSE_REDUCTION_INSCAN (c)
                       || OMP_CLAUSE_REDUCTION_TASK (c)))
                  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
                  || is_task_ctx (ctx)))
            {
              /* For now.  */
              if (ctx->allocate_map->get (decl))
                ctx->allocate_map->remove (decl);
            }
          if (TREE_CODE (decl) == MEM_REF)
            {
              tree t = TREE_OPERAND (decl, 0);
              if (TREE_CODE (t) == POINTER_PLUS_EXPR)
                t = TREE_OPERAND (t, 0);
              if (TREE_CODE (t) == INDIRECT_REF
                  || TREE_CODE (t) == ADDR_EXPR)
                t = TREE_OPERAND (t, 0);
              if (is_omp_target (ctx->stmt))
                {
                  if (is_variable_sized (t))
                    {
                      gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
                      t = DECL_VALUE_EXPR (t);
                      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
                      t = TREE_OPERAND (t, 0);
                      gcc_assert (DECL_P (t));
                    }
                  tree at = t;
                  if (ctx->outer)
                    scan_omp_op (&at, ctx->outer);
                  tree nt = omp_copy_decl_1 (at, ctx->outer);
                  splay_tree_insert (ctx->field_map,
                                     (splay_tree_key) &DECL_CONTEXT (t),
                                     (splay_tree_value) nt);
                  if (at != t)
                    splay_tree_insert (ctx->field_map,
                                       (splay_tree_key) &DECL_CONTEXT (at),
                                       (splay_tree_value) nt);
                  break;
                }
              install_var_local (t, ctx);
              if (is_taskreg_ctx (ctx)
                  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
                      || (is_task_ctx (ctx)
                          && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
                              || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
                                  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
                                      == POINTER_TYPE)))))
                  && !is_variable_sized (t)
                  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
                      || (!OMP_CLAUSE_REDUCTION_TASK (c)
                          && !is_task_ctx (ctx))))
                {
                  by_ref = use_pointer_for_field (t, NULL);
                  if (is_task_ctx (ctx)
                      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
                      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
                    {
                      install_var_field (t, false, 1, ctx);
                      install_var_field (t, by_ref, 2, ctx);
                    }
                  else
                    install_var_field (t, by_ref, 3, ctx);
                }
              break;
            }
          if (is_omp_target (ctx->stmt))
            {
              tree at = decl;
              if (ctx->outer)
                scan_omp_op (&at, ctx->outer);
              tree nt = omp_copy_decl_1 (at, ctx->outer);
              splay_tree_insert (ctx->field_map,
                                 (splay_tree_key) &DECL_CONTEXT (decl),
                                 (splay_tree_value) nt);
              if (at != decl)
                splay_tree_insert (ctx->field_map,
                                   (splay_tree_key) &DECL_CONTEXT (at),
                                   (splay_tree_value) nt);
              break;
            }
          if (is_task_ctx (ctx)
              || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
                  && OMP_CLAUSE_REDUCTION_TASK (c)
                  && is_parallel_ctx (ctx)))
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
                {
                  by_ref = use_pointer_for_field (decl, ctx);
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
                    install_var_field (decl, by_ref, 3, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
              && OMP_CLAUSE_REDUCTION_TASK (c))
            {
              install_var_local (decl, ctx);
              break;
            }
          goto do_private;

        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_LINEAR:
          decl = OMP_CLAUSE_DECL (c);
        do_private:
          if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
               || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
              && is_gimple_omp_offloaded (ctx->stmt))
            {
              if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
                {
                  by_ref = !omp_privatize_by_reference (decl);
                  install_var_field (decl, by_ref, 3, ctx);
                }
              else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                install_var_field (decl, true, 3, ctx);
              else
                install_var_field (decl, false, 3, ctx);
            }
          if (is_variable_sized (decl))
            {
              if (is_task_ctx (ctx))
                {
                  if (ctx->allocate_map
                      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
                    {
                      /* For now.  */
                      if (ctx->allocate_map->get (decl))
                        ctx->allocate_map->remove (decl);
                    }
                  install_var_field (decl, false, 1, ctx);
                }
              break;
            }
          else if (is_taskreg_ctx (ctx))
            {
              bool global
                = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
              by_ref = use_pointer_for_field (decl, NULL);

              if (is_task_ctx (ctx)
                  && (global || by_ref || omp_privatize_by_reference (decl)))
                {
                  if (ctx->allocate_map
                      && ctx->allocate_map->get (decl))
                    install_var_field (decl, by_ref, 32 | 1, ctx);
                  else
                    install_var_field (decl, false, 1, ctx);
                  if (!global)
                    install_var_field (decl, by_ref, 2, ctx);
                }
              else if (!global)
                install_var_field (decl, by_ref, 3, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_USE_DEVICE_PTR:
        case OMP_CLAUSE_USE_DEVICE_ADDR:
          decl = OMP_CLAUSE_DECL (c);

          /* Fortran array descriptors.  */
          if (lang_hooks.decls.omp_array_data (decl, true))
            install_var_field (decl, false, 19, ctx);
          else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
                    && !omp_privatize_by_reference (decl)
                    && !omp_is_allocatable_or_ptr (decl))
                   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_field (decl, true, 11, ctx);
          else
            install_var_field (decl, false, 11, ctx);
          if (DECL_SIZE (decl)
              && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
            {
              tree decl2 = DECL_VALUE_EXPR (decl);
              gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
              decl2 = TREE_OPERAND (decl2, 0);
              gcc_assert (DECL_P (decl2));
              install_var_local (decl2, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          goto do_private;

        case OMP_CLAUSE__LOOPTEMP_:
        case OMP_CLAUSE__REDUCTEMP_:
          gcc_assert (is_taskreg_ctx (ctx));
          decl = OMP_CLAUSE_DECL (c);
          install_var_field (decl, false, 3, ctx);
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
          decl = OMP_CLAUSE_DECL (c);
          by_ref = use_pointer_for_field (decl, NULL);
          install_var_field (decl, by_ref, 3, ctx);
          break;

        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
        case OMP_CLAUSE_DETACH:
        case OMP_CLAUSE_FILTER:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
          break;

        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_MAP:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
          decl = OMP_CLAUSE_DECL (c);
          /* Global variables with "omp declare target" attribute
             don't need to be copied, the receiver side will use them
             directly.  However, global variables with "omp declare target
             link" attribute need to be copied, as do maps with the ALWAYS
             modifier.  */
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
                   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
                   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable
              && !lookup_attribute ("omp declare target link",
                                    DECL_ATTRIBUTES (decl)))
            break;
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
            {
              /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
                 not offloaded; there is nothing to map for those.  */
              if (!is_gimple_omp_offloaded (ctx->stmt)
                  && !POINTER_TYPE_P (TREE_TYPE (decl))
                  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
                break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && DECL_P (decl)
              && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
                  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
              && is_omp_target (ctx->stmt))
            {
              /* If this is an offloaded region, an attach operation should
                 only exist when the pointer variable is mapped in a prior
                 clause.  */
              if (is_gimple_omp_offloaded (ctx->stmt))
                gcc_assert
                  (maybe_lookup_decl (decl, ctx)
                   || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
                       && lookup_attribute ("omp declare target",
                                            DECL_ATTRIBUTES (decl))));

              /* By itself, attach/detach is generated as part of pointer
                 variable mapping and should not create new variables in the
                 offloaded region, however sender refs for it must be created
                 for its address to be passed to the runtime.  */
              tree field
                = build_decl (OMP_CLAUSE_LOCATION (c),
                              FIELD_DECL, NULL_TREE, ptr_type_node);
              SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
              insert_field_into_struct (ctx->record_type, field);
              /* To not clash with a map of the pointer variable itself,
                 attach/detach maps have their field looked up by the *clause*
                 tree expression, not the decl.  */
              gcc_assert (!splay_tree_lookup (ctx->field_map,
                                              (splay_tree_key) c));
              splay_tree_insert (ctx->field_map, (splay_tree_key) c,
                                 (splay_tree_value) field);
              break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
                  || (OMP_CLAUSE_MAP_KIND (c)
                      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
            {
              if (TREE_CODE (decl) == COMPONENT_REF
                  || (TREE_CODE (decl) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
                      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
                          == REFERENCE_TYPE)))
                break;
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (DECL_P (decl))
            {
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_field (decl2, true, 3, ctx);
                  install_var_local (decl2, ctx);
                  install_var_local (decl, ctx);
                }
              else
                {
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
                      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                    install_var_field (decl, true, 7, ctx);
                  else
                    install_var_field (decl, true, 3, ctx);
                  if (is_gimple_omp_offloaded (ctx->stmt)
                      && !(is_gimple_omp_oacc (ctx->stmt)
                           && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
                    install_var_local (decl, ctx);
                }
            }
          else
            {
              tree base = get_base_address (decl);
              tree nc = OMP_CLAUSE_CHAIN (c);
              if (DECL_P (base)
                  && nc != NULL_TREE
                  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
                  && OMP_CLAUSE_DECL (nc) == base
                  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
                  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
                {
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
                }
              else
                {
                  if (ctx->outer)
                    {
                      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
                      decl = OMP_CLAUSE_DECL (c);
                    }
                  gcc_assert (!splay_tree_lookup (ctx->field_map,
                                                  (splay_tree_key) decl));
                  tree field
                    = build_decl (OMP_CLAUSE_LOCATION (c),
                                  FIELD_DECL, NULL_TREE, ptr_type_node);
                  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
                  insert_field_into_struct (ctx->record_type, field);
                  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
                                     (splay_tree_value) field);
                }
            }
          break;

        case OMP_CLAUSE_ORDER:
          ctx->order_concurrent = true;
          break;

        case OMP_CLAUSE_BIND:
          ctx->loop_p = true;
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_NONTEMPORAL:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
        case OMP_CLAUSE_TASK_REDUCTION:
        case OMP_CLAUSE_ALLOCATE:
          break;

        case OMP_CLAUSE_ALIGNED:
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (decl)
              && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE__CONDTEMP_:
          decl = OMP_CLAUSE_DECL (c);
          if (is_parallel_ctx (ctx))
            {
              install_var_field (decl, false, 3, ctx);
              install_var_local (decl, ctx);
            }
          else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
                   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
                   && !OMP_CLAUSE__CONDTEMP__ITER (c))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE__CACHE_:
        case OMP_CLAUSE_NOHOST:
        default:
          gcc_unreachable ();
        }
    }

1716 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1718 switch (OMP_CLAUSE_CODE (c))
1720 case OMP_CLAUSE_LASTPRIVATE:
1721 /* Let the corresponding firstprivate clause create
1722 the variable. */
1723 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1724 scan_array_reductions = true;
1725 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1726 break;
1727 /* FALLTHRU */
1729 case OMP_CLAUSE_FIRSTPRIVATE:
1730 case OMP_CLAUSE_PRIVATE:
1731 case OMP_CLAUSE_LINEAR:
1732 case OMP_CLAUSE_IS_DEVICE_PTR:
1733 decl = OMP_CLAUSE_DECL (c);
1734 if (is_variable_sized (decl))
1736 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1737 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1738 && is_gimple_omp_offloaded (ctx->stmt))
1740 tree decl2 = DECL_VALUE_EXPR (decl);
1741 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1742 decl2 = TREE_OPERAND (decl2, 0);
1743 gcc_assert (DECL_P (decl2));
1744 install_var_local (decl2, ctx);
1745 fixup_remapped_decl (decl2, ctx, false);
1747 install_var_local (decl, ctx);
1749 fixup_remapped_decl (decl, ctx,
1750 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1751 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1752 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1753 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1754 scan_array_reductions = true;
1755 break;
1757 case OMP_CLAUSE_REDUCTION:
1758 case OMP_CLAUSE_IN_REDUCTION:
1759 decl = OMP_CLAUSE_DECL (c);
1760 if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
1762 if (is_variable_sized (decl))
1763 install_var_local (decl, ctx);
1764 fixup_remapped_decl (decl, ctx, false);
1766 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1767 scan_array_reductions = true;
1768 break;
1770 case OMP_CLAUSE_TASK_REDUCTION:
1771 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1772 scan_array_reductions = true;
1773 break;
1775 case OMP_CLAUSE_SHARED:
1776 /* Ignore shared directives in teams construct inside of
1777 target construct. */
1778 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1779 && !is_host_teams_ctx (ctx))
1780 break;
1781 decl = OMP_CLAUSE_DECL (c);
1782 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1783 break;
1784 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1786 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1787 ctx->outer)))
1788 break;
1789 bool by_ref = use_pointer_for_field (decl, ctx);
1790 install_var_field (decl, by_ref, 11, ctx);
1791 break;
1793 fixup_remapped_decl (decl, ctx, false);
1794 break;
1796 case OMP_CLAUSE_MAP:
1797 if (!is_gimple_omp_offloaded (ctx->stmt))
1798 break;
1799 decl = OMP_CLAUSE_DECL (c);
1800 if (DECL_P (decl)
1801 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1802 && (OMP_CLAUSE_MAP_KIND (c)
1803 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1804 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1805 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1806 && varpool_node::get_create (decl)->offloadable)
1807 break;
1808 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1809 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1810 && is_omp_target (ctx->stmt)
1811 && !is_gimple_omp_offloaded (ctx->stmt))
1812 break;
1813 if (DECL_P (decl))
1815 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1816 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1817 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1818 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1820 tree new_decl = lookup_decl (decl, ctx);
1821 TREE_TYPE (new_decl)
1822 = remap_type (TREE_TYPE (decl), &ctx->cb);
1824 else if (DECL_SIZE (decl)
1825 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1827 tree decl2 = DECL_VALUE_EXPR (decl);
1828 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1829 decl2 = TREE_OPERAND (decl2, 0);
1830 gcc_assert (DECL_P (decl2));
1831 fixup_remapped_decl (decl2, ctx, false);
1832 fixup_remapped_decl (decl, ctx, true);
1834 else
1835 fixup_remapped_decl (decl, ctx, false);
1837 break;
1839 case OMP_CLAUSE_COPYPRIVATE:
1840 case OMP_CLAUSE_COPYIN:
1841 case OMP_CLAUSE_DEFAULT:
1842 case OMP_CLAUSE_IF:
1843 case OMP_CLAUSE_NUM_THREADS:
1844 case OMP_CLAUSE_NUM_TEAMS:
1845 case OMP_CLAUSE_THREAD_LIMIT:
1846 case OMP_CLAUSE_DEVICE:
1847 case OMP_CLAUSE_SCHEDULE:
1848 case OMP_CLAUSE_DIST_SCHEDULE:
1849 case OMP_CLAUSE_NOWAIT:
1850 case OMP_CLAUSE_ORDERED:
1851 case OMP_CLAUSE_COLLAPSE:
1852 case OMP_CLAUSE_UNTIED:
1853 case OMP_CLAUSE_FINAL:
1854 case OMP_CLAUSE_MERGEABLE:
1855 case OMP_CLAUSE_PROC_BIND:
1856 case OMP_CLAUSE_SAFELEN:
1857 case OMP_CLAUSE_SIMDLEN:
1858 case OMP_CLAUSE_ALIGNED:
1859 case OMP_CLAUSE_DEPEND:
1860 case OMP_CLAUSE_DETACH:
1861 case OMP_CLAUSE_ALLOCATE:
1862 case OMP_CLAUSE__LOOPTEMP_:
1863 case OMP_CLAUSE__REDUCTEMP_:
1864 case OMP_CLAUSE_TO:
1865 case OMP_CLAUSE_FROM:
1866 case OMP_CLAUSE_PRIORITY:
1867 case OMP_CLAUSE_GRAINSIZE:
1868 case OMP_CLAUSE_NUM_TASKS:
1869 case OMP_CLAUSE_THREADS:
1870 case OMP_CLAUSE_SIMD:
1871 case OMP_CLAUSE_NOGROUP:
1872 case OMP_CLAUSE_DEFAULTMAP:
1873 case OMP_CLAUSE_ORDER:
1874 case OMP_CLAUSE_BIND:
1875 case OMP_CLAUSE_USE_DEVICE_PTR:
1876 case OMP_CLAUSE_USE_DEVICE_ADDR:
1877 case OMP_CLAUSE_NONTEMPORAL:
1878 case OMP_CLAUSE_ASYNC:
1879 case OMP_CLAUSE_WAIT:
1880 case OMP_CLAUSE_NUM_GANGS:
1881 case OMP_CLAUSE_NUM_WORKERS:
1882 case OMP_CLAUSE_VECTOR_LENGTH:
1883 case OMP_CLAUSE_GANG:
1884 case OMP_CLAUSE_WORKER:
1885 case OMP_CLAUSE_VECTOR:
1886 case OMP_CLAUSE_INDEPENDENT:
1887 case OMP_CLAUSE_AUTO:
1888 case OMP_CLAUSE_SEQ:
1889 case OMP_CLAUSE_TILE:
1890 case OMP_CLAUSE__SIMT_:
1891 case OMP_CLAUSE_IF_PRESENT:
1892 case OMP_CLAUSE_FINALIZE:
1893 case OMP_CLAUSE_FILTER:
1894 case OMP_CLAUSE__CONDTEMP_:
1895 break;
1897 case OMP_CLAUSE__CACHE_:
1898 case OMP_CLAUSE_NOHOST:
1899 default:
1900 gcc_unreachable ();
1904 gcc_checking_assert (!scan_array_reductions
1905 || !is_gimple_omp_oacc (ctx->stmt));
1906 if (scan_array_reductions)
1908 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1909 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1910 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1911 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1912 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1914 omp_context *rctx = ctx;
1915 if (is_omp_target (ctx->stmt))
1916 rctx = ctx->outer;
1917 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
1918 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
1920 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1921 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1922 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1923 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1924 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1925 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
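/* Illustrative note (simplified): for a user-defined reduction such as

     #pragma omp declare reduction (merge : T : omp_out = combine (omp_out, omp_in)) \
       initializer (omp_priv = init_val ())
     ...
     #pragma omp parallel for reduction (merge : v)

   where T, combine and init_val stand for any user-provided type and helpers,
   OMP_CLAUSE_REDUCTION_GIMPLE_INIT and OMP_CLAUSE_REDUCTION_GIMPLE_MERGE hold
   the gimplified initializer and combiner sequences; the scan_omp calls above
   remap the variables those sequences reference, using the enclosing context
   when the reduction sits on a target construct.  */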
1929 /* Create a new name for the omp child function.  Returns an identifier. */
1931 static tree
1932 create_omp_child_function_name (bool task_copy)
1934 return clone_function_name_numbered (current_function_decl,
1935 task_copy ? "_omp_cpyfn" : "_omp_fn");
1938 /* Return true if CTX may belong to offloaded code: either if the current
1939 function is offloaded, or any enclosing context corresponds to a target region. */
1941 static bool
1942 omp_maybe_offloaded_ctx (omp_context *ctx)
1944 if (cgraph_node::get (current_function_decl)->offloadable)
1945 return true;
1946 for (; ctx; ctx = ctx->outer)
1947 if (is_gimple_omp_offloaded (ctx->stmt))
1948 return true;
1949 return false;
1952 /* Build a decl for the omp child function.  It will not contain a body
1953 yet, just the bare decl. */
1955 static void
1956 create_omp_child_function (omp_context *ctx, bool task_copy)
1958 tree decl, type, name, t;
1960 name = create_omp_child_function_name (task_copy);
1961 if (task_copy)
1962 type = build_function_type_list (void_type_node, ptr_type_node,
1963 ptr_type_node, NULL_TREE);
1964 else
1965 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1967 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1969 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1970 || !task_copy);
1971 if (!task_copy)
1972 ctx->cb.dst_fn = decl;
1973 else
1974 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1976 TREE_STATIC (decl) = 1;
1977 TREE_USED (decl) = 1;
1978 DECL_ARTIFICIAL (decl) = 1;
1979 DECL_IGNORED_P (decl) = 0;
1980 TREE_PUBLIC (decl) = 0;
1981 DECL_UNINLINABLE (decl) = 1;
1982 DECL_EXTERNAL (decl) = 0;
1983 DECL_CONTEXT (decl) = NULL_TREE;
1984 DECL_INITIAL (decl) = make_node (BLOCK);
1985 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1986 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1987 /* Remove omp declare simd attribute from the new attributes. */
1988 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1990 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1991 a = a2;
1992 a = TREE_CHAIN (a);
1993 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1994 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1995 *p = TREE_CHAIN (*p);
1996 else
1998 tree chain = TREE_CHAIN (*p);
1999 *p = copy_node (*p);
2000 p = &TREE_CHAIN (*p);
2001 *p = chain;
2004 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
2005 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
2006 DECL_FUNCTION_SPECIFIC_TARGET (decl)
2007 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
2008 DECL_FUNCTION_VERSIONED (decl)
2009 = DECL_FUNCTION_VERSIONED (current_function_decl);
2011 if (omp_maybe_offloaded_ctx (ctx))
2013 cgraph_node::get_create (decl)->offloadable = 1;
2014 if (ENABLE_OFFLOADING)
2015 g->have_offload = true;
2018 if (cgraph_node::get_create (decl)->offloadable)
2020 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
2021 ? "omp target entrypoint"
2022 : "omp declare target");
2023 if (lookup_attribute ("omp declare target",
2024 DECL_ATTRIBUTES (current_function_decl)))
2026 if (is_gimple_omp_offloaded (ctx->stmt))
2027 DECL_ATTRIBUTES (decl)
2028 = remove_attribute ("omp declare target",
2029 copy_list (DECL_ATTRIBUTES (decl)));
2030 else
2031 target_attr = NULL;
2033 if (target_attr)
2034 DECL_ATTRIBUTES (decl)
2035 = tree_cons (get_identifier (target_attr),
2036 NULL_TREE, DECL_ATTRIBUTES (decl));
2039 t = build_decl (DECL_SOURCE_LOCATION (decl),
2040 RESULT_DECL, NULL_TREE, void_type_node);
2041 DECL_ARTIFICIAL (t) = 1;
2042 DECL_IGNORED_P (t) = 1;
2043 DECL_CONTEXT (t) = decl;
2044 DECL_RESULT (decl) = t;
2046 tree data_name = get_identifier (".omp_data_i");
2047 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
2048 ptr_type_node);
2049 DECL_ARTIFICIAL (t) = 1;
2050 DECL_NAMELESS (t) = 1;
2051 DECL_ARG_TYPE (t) = ptr_type_node;
2052 DECL_CONTEXT (t) = current_function_decl;
2053 TREE_USED (t) = 1;
2054 TREE_READONLY (t) = 1;
2055 DECL_ARGUMENTS (decl) = t;
2056 if (!task_copy)
2057 ctx->receiver_decl = t;
2058 else
2060 t = build_decl (DECL_SOURCE_LOCATION (decl),
2061 PARM_DECL, get_identifier (".omp_data_o"),
2062 ptr_type_node);
2063 DECL_ARTIFICIAL (t) = 1;
2064 DECL_NAMELESS (t) = 1;
2065 DECL_ARG_TYPE (t) = ptr_type_node;
2066 DECL_CONTEXT (t) = current_function_decl;
2067 TREE_USED (t) = 1;
2068 TREE_ADDRESSABLE (t) = 1;
2069 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2070 DECL_ARGUMENTS (decl) = t;
2073 /* Allocate memory for the function structure. The call to
2074 allocate_struct_function clobbers CFUN, so we need to restore
2075 it afterward. */
2076 push_struct_function (decl);
2077 cfun->function_end_locus = gimple_location (ctx->stmt);
2078 init_tree_ssa (cfun);
2079 pop_cfun ();
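/* Schematically, for a parallel region in foo the decl built above
   corresponds to something like

     static void foo._omp_fn.0 (void *.omp_data_i);

   where .omp_data_i will point at the .omp_data_s record assembled by the
   scanning code, while a task copy function instead takes the shape

     static void foo._omp_cpyfn.1 (void *.omp_data_o, void *.omp_data_i);

   copying captured state from the .omp_data_i block into the .omp_data_o
   block of the spawned task (the suffix numbering here is illustrative).  */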
2082 /* Callback for walk_gimple_seq.  Check if a combined parallel
2083 contains an OMP_FOR for which gimple_omp_for_combined_into_p holds. */
2085 tree
2086 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2087 bool *handled_ops_p,
2088 struct walk_stmt_info *wi)
2090 gimple *stmt = gsi_stmt (*gsi_p);
2092 *handled_ops_p = true;
2093 switch (gimple_code (stmt))
2095 WALK_SUBSTMTS;
2097 case GIMPLE_OMP_FOR:
2098 if (gimple_omp_for_combined_into_p (stmt)
2099 && gimple_omp_for_kind (stmt)
2100 == *(const enum gf_mask *) (wi->info))
2102 wi->info = stmt;
2103 return integer_zero_node;
2105 break;
2106 default:
2107 break;
2109 return NULL;
2112 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2114 static void
2115 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2116 omp_context *outer_ctx)
2118 struct walk_stmt_info wi;
2120 memset (&wi, 0, sizeof (wi));
2121 wi.val_only = true;
2122 wi.info = (void *) &msk;
2123 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2124 if (wi.info != (void *) &msk)
2126 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2127 struct omp_for_data fd;
2128 omp_extract_for_data (for_stmt, &fd, NULL);
2129 /* We need two temporaries with fd.loop.v type (istart/iend)
2130 and then (fd.collapse - 1) temporaries with the same
2131 type for the count2 ... countN-1 vars if the iteration count is not constant. */
2132 size_t count = 2, i;
2133 tree type = fd.iter_type;
2134 if (fd.collapse > 1
2135 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2137 count += fd.collapse - 1;
2138 /* If there are lastprivate clauses on the inner
2139 GIMPLE_OMP_FOR, add one more temporary for the total number
2140 of iterations (product of count1 ... countN-1). */
2141 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2142 OMP_CLAUSE_LASTPRIVATE)
2143 || (msk == GF_OMP_FOR_KIND_FOR
2144 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2145 OMP_CLAUSE_LASTPRIVATE)))
2147 tree temp = create_tmp_var (type);
2148 tree c = build_omp_clause (UNKNOWN_LOCATION,
2149 OMP_CLAUSE__LOOPTEMP_);
2150 insert_decl_map (&outer_ctx->cb, temp, temp);
2151 OMP_CLAUSE_DECL (c) = temp;
2152 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2153 gimple_omp_taskreg_set_clauses (stmt, c);
2155 if (fd.non_rect
2156 && fd.last_nonrect == fd.first_nonrect + 1)
2157 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2158 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2160 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2161 tree type2 = TREE_TYPE (v);
2162 count++;
2163 for (i = 0; i < 3; i++)
2165 tree temp = create_tmp_var (type2);
2166 tree c = build_omp_clause (UNKNOWN_LOCATION,
2167 OMP_CLAUSE__LOOPTEMP_);
2168 insert_decl_map (&outer_ctx->cb, temp, temp);
2169 OMP_CLAUSE_DECL (c) = temp;
2170 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2171 gimple_omp_taskreg_set_clauses (stmt, c);
2175 for (i = 0; i < count; i++)
2177 tree temp = create_tmp_var (type);
2178 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2179 insert_decl_map (&outer_ctx->cb, temp, temp);
2180 OMP_CLAUSE_DECL (c) = temp;
2181 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2182 gimple_omp_taskreg_set_clauses (stmt, c);
2185 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2186 && omp_find_clause (gimple_omp_task_clauses (stmt),
2187 OMP_CLAUSE_REDUCTION))
2189 tree type = build_pointer_type (pointer_sized_int_node);
2190 tree temp = create_tmp_var (type);
2191 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2192 insert_decl_map (&outer_ctx->cb, temp, temp);
2193 OMP_CLAUSE_DECL (c) = temp;
2194 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2195 gimple_omp_task_set_clauses (stmt, c);
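/* Worked example: given a combined construct with a non-constant
   iteration count,

     #pragma omp parallel for collapse(2) lastprivate(x)
     for (i = 0; i < m; i++)
       for (j = 0; j < n; j++)
         ...

   the code above creates istart/iend (count starts at 2), one more counter
   for the collapsed inner loop (fd.collapse - 1 == 1), plus an extra
   temporary for the total iteration count because of the lastprivate,
   i.e. four _LOOPTEMP_ clauses on the parallel in total.  */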
2199 /* Scan an OpenMP parallel directive. */
2201 static void
2202 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2204 omp_context *ctx;
2205 tree name;
2206 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2208 /* Ignore parallel directives with empty bodies, unless there
2209 are copyin clauses. */
2210 if (optimize > 0
2211 && empty_body_p (gimple_omp_body (stmt))
2212 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2213 OMP_CLAUSE_COPYIN) == NULL)
2215 gsi_replace (gsi, gimple_build_nop (), false);
2216 return;
2219 if (gimple_omp_parallel_combined_p (stmt))
2220 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
2221 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2222 OMP_CLAUSE_REDUCTION);
2223 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2224 if (OMP_CLAUSE_REDUCTION_TASK (c))
2226 tree type = build_pointer_type (pointer_sized_int_node);
2227 tree temp = create_tmp_var (type);
2228 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2229 if (outer_ctx)
2230 insert_decl_map (&outer_ctx->cb, temp, temp);
2231 OMP_CLAUSE_DECL (c) = temp;
2232 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2233 gimple_omp_parallel_set_clauses (stmt, c);
2234 break;
2236 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2237 break;
2239 ctx = new_omp_context (stmt, outer_ctx);
2240 taskreg_contexts.safe_push (ctx);
2241 if (taskreg_nesting_level > 1)
2242 ctx->is_nested = true;
2243 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2244 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2245 name = create_tmp_var_name (".omp_data_s");
2246 name = build_decl (gimple_location (stmt),
2247 TYPE_DECL, name, ctx->record_type);
2248 DECL_ARTIFICIAL (name) = 1;
2249 DECL_NAMELESS (name) = 1;
2250 TYPE_NAME (ctx->record_type) = name;
2251 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2252 create_omp_child_function (ctx, false);
2253 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2255 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2256 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2258 if (TYPE_FIELDS (ctx->record_type) == NULL)
2259 ctx->record_type = ctx->receiver_decl = NULL;
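/* For instance,

     int x, y;
     #pragma omp parallel shared(x) private(y)
     { ... }

   gives .omp_data_s a field for x (an int *, or a plain int where
   use_pointer_for_field permits copy-in/copy-out), y stays a local of the
   child function, and if no clause required a field at all the record and
   receiver decl are dropped just above.  */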
2262 /* Scan an OpenMP task directive. */
2264 static void
2265 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2267 omp_context *ctx;
2268 tree name, t;
2269 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2271 /* Ignore task directives with empty bodies, unless they have a depend
2272 clause. */
2273 if (optimize > 0
2274 && gimple_omp_body (stmt)
2275 && empty_body_p (gimple_omp_body (stmt))
2276 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2278 gsi_replace (gsi, gimple_build_nop (), false);
2279 return;
2282 if (gimple_omp_task_taskloop_p (stmt))
2283 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2285 ctx = new_omp_context (stmt, outer_ctx);
2287 if (gimple_omp_task_taskwait_p (stmt))
2289 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2290 return;
2293 taskreg_contexts.safe_push (ctx);
2294 if (taskreg_nesting_level > 1)
2295 ctx->is_nested = true;
2296 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2297 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2298 name = create_tmp_var_name (".omp_data_s");
2299 name = build_decl (gimple_location (stmt),
2300 TYPE_DECL, name, ctx->record_type);
2301 DECL_ARTIFICIAL (name) = 1;
2302 DECL_NAMELESS (name) = 1;
2303 TYPE_NAME (ctx->record_type) = name;
2304 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2305 create_omp_child_function (ctx, false);
2306 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2308 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2310 if (ctx->srecord_type)
2312 name = create_tmp_var_name (".omp_data_a");
2313 name = build_decl (gimple_location (stmt),
2314 TYPE_DECL, name, ctx->srecord_type);
2315 DECL_ARTIFICIAL (name) = 1;
2316 DECL_NAMELESS (name) = 1;
2317 TYPE_NAME (ctx->srecord_type) = name;
2318 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2319 create_omp_child_function (ctx, true);
2322 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2324 if (TYPE_FIELDS (ctx->record_type) == NULL)
2326 ctx->record_type = ctx->receiver_decl = NULL;
2327 t = build_int_cst (long_integer_type_node, 0);
2328 gimple_omp_task_set_arg_size (stmt, t);
2329 t = build_int_cst (long_integer_type_node, 1);
2330 gimple_omp_task_set_arg_align (stmt, t);
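/* With no captured state the runtime is told to allocate no argument block
   (arg_size 0, arg_align 1); otherwise finish_taskreg_scan fills in the real
   values after layout.  E.g. "#pragma omp task firstprivate(p)" with a
   pointer p yields a one-field .omp_data_s, and firstprivate data that needs
   constructing additionally yields the .omp_data_a srecord serviced by the
   _omp_cpyfn child function created above.  */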
2334 /* Helper function for finish_taskreg_scan, called through walk_tree.
2335 If maybe_lookup_decl_in_outer_ctx returns a different tree for some
2336 variable, replace it in the expression. */
2338 static tree
2339 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2341 if (VAR_P (*tp))
2343 omp_context *ctx = (omp_context *) data;
2344 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2345 if (t != *tp)
2347 if (DECL_HAS_VALUE_EXPR_P (t))
2348 t = unshare_expr (DECL_VALUE_EXPR (t));
2349 *tp = t;
2351 *walk_subtrees = 0;
2353 else if (IS_TYPE_OR_DECL_P (*tp))
2354 *walk_subtrees = 0;
2355 return NULL_TREE;
2358 /* If any decls have been made addressable during scan_omp,
2359 adjust their fields if needed, and layout record types
2360 of parallel/task constructs. */
2362 static void
2363 finish_taskreg_scan (omp_context *ctx)
2365 if (ctx->record_type == NULL_TREE)
2366 return;
2368 /* If any task_shared_vars were needed, check for all
2369 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2370 statements whether use_pointer_for_field has changed
2371 because of that. If it did, update the field types now. */
2372 if (task_shared_vars)
2374 tree c;
2376 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2377 c; c = OMP_CLAUSE_CHAIN (c))
2378 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2379 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2381 tree decl = OMP_CLAUSE_DECL (c);
2383 /* Global variables don't need to be copied,
2384 the receiver side will use them directly. */
2385 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2386 continue;
2387 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2388 || !use_pointer_for_field (decl, ctx))
2389 continue;
2390 tree field = lookup_field (decl, ctx);
2391 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2392 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2393 continue;
2394 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2395 TREE_THIS_VOLATILE (field) = 0;
2396 DECL_USER_ALIGN (field) = 0;
2397 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2398 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2399 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2400 if (ctx->srecord_type)
2402 tree sfield = lookup_sfield (decl, ctx);
2403 TREE_TYPE (sfield) = TREE_TYPE (field);
2404 TREE_THIS_VOLATILE (sfield) = 0;
2405 DECL_USER_ALIGN (sfield) = 0;
2406 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2407 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2408 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2413 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2415 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2416 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2417 if (c)
2419 /* Move the field for the _reductemp_ clause first. GOMP_parallel_reductions
2420 expects to find it at the start of data. */
2421 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2422 tree *p = &TYPE_FIELDS (ctx->record_type);
2423 while (*p)
2424 if (*p == f)
2426 *p = DECL_CHAIN (*p);
2427 break;
2429 else
2430 p = &DECL_CHAIN (*p);
2431 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2432 TYPE_FIELDS (ctx->record_type) = f;
2434 layout_type (ctx->record_type);
2435 fixup_child_record_type (ctx);
2437 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2439 layout_type (ctx->record_type);
2440 fixup_child_record_type (ctx);
2442 else
2444 location_t loc = gimple_location (ctx->stmt);
2445 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2446 tree detach_clause
2447 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2448 OMP_CLAUSE_DETACH);
2449 /* Move VLA fields to the end. */
2450 p = &TYPE_FIELDS (ctx->record_type);
2451 while (*p)
2452 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2453 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2455 *q = *p;
2456 *p = TREE_CHAIN (*p);
2457 TREE_CHAIN (*q) = NULL_TREE;
2458 q = &TREE_CHAIN (*q);
2460 else
2461 p = &DECL_CHAIN (*p);
2462 *p = vla_fields;
2463 if (gimple_omp_task_taskloop_p (ctx->stmt))
2465 /* Move the fields corresponding to the first and second _looptemp_
2466 clauses first.  These are filled by GOMP_taskloop
2467 and thus need to be in specific positions. */
2468 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2469 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2470 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2471 OMP_CLAUSE__LOOPTEMP_);
2472 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2473 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2474 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2475 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2476 p = &TYPE_FIELDS (ctx->record_type);
2477 while (*p)
2478 if (*p == f1 || *p == f2 || *p == f3)
2479 *p = DECL_CHAIN (*p);
2480 else
2481 p = &DECL_CHAIN (*p);
2482 DECL_CHAIN (f1) = f2;
2483 if (c3)
2485 DECL_CHAIN (f2) = f3;
2486 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2488 else
2489 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2490 TYPE_FIELDS (ctx->record_type) = f1;
2491 if (ctx->srecord_type)
2493 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2494 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2495 if (c3)
2496 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2497 p = &TYPE_FIELDS (ctx->srecord_type);
2498 while (*p)
2499 if (*p == f1 || *p == f2 || *p == f3)
2500 *p = DECL_CHAIN (*p);
2501 else
2502 p = &DECL_CHAIN (*p);
2503 DECL_CHAIN (f1) = f2;
2504 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2505 if (c3)
2507 DECL_CHAIN (f2) = f3;
2508 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2510 else
2511 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2512 TYPE_FIELDS (ctx->srecord_type) = f1;
2515 if (detach_clause)
2517 tree c, field;
2519 /* Look for a firstprivate clause with the detach event handle. */
2520 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2521 c; c = OMP_CLAUSE_CHAIN (c))
2523 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2524 continue;
2525 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2526 == OMP_CLAUSE_DECL (detach_clause))
2527 break;
2530 gcc_assert (c);
2531 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2533 /* Move the field corresponding to the detach clause first.
2534 This is filled by GOMP_task and needs to be in a
2535 specific position. */
2536 p = &TYPE_FIELDS (ctx->record_type);
2537 while (*p)
2538 if (*p == field)
2539 *p = DECL_CHAIN (*p);
2540 else
2541 p = &DECL_CHAIN (*p);
2542 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2543 TYPE_FIELDS (ctx->record_type) = field;
2544 if (ctx->srecord_type)
2546 field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2547 p = &TYPE_FIELDS (ctx->srecord_type);
2548 while (*p)
2549 if (*p == field)
2550 *p = DECL_CHAIN (*p);
2551 else
2552 p = &DECL_CHAIN (*p);
2553 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2554 TYPE_FIELDS (ctx->srecord_type) = field;
2557 layout_type (ctx->record_type);
2558 fixup_child_record_type (ctx);
2559 if (ctx->srecord_type)
2560 layout_type (ctx->srecord_type);
2561 tree t = fold_convert_loc (loc, long_integer_type_node,
2562 TYPE_SIZE_UNIT (ctx->record_type));
2563 if (TREE_CODE (t) != INTEGER_CST)
2565 t = unshare_expr (t);
2566 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2568 gimple_omp_task_set_arg_size (ctx->stmt, t);
2569 t = build_int_cst (long_integer_type_node,
2570 TYPE_ALIGN_UNIT (ctx->record_type));
2571 gimple_omp_task_set_arg_align (ctx->stmt, t);
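/* Layout example: for "#pragma omp taskloop lastprivate(x)", GOMP_taskloop
   stores the start and end iteration bounds into the first two slots of the
   argument block, so the code above pins the two _looptemp_ fields (followed
   by a _reductemp_ field, if any) at the front of .omp_data_s and moves
   variable-sized fields to the end, where they do not disturb the fixed part
   of the layout.  */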
2575 /* Find the enclosing offload context. */
2577 static omp_context *
2578 enclosing_target_ctx (omp_context *ctx)
2580 for (; ctx; ctx = ctx->outer)
2581 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2582 break;
2584 return ctx;
2587 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2588 construct.
2589 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2591 static bool
2592 ctx_in_oacc_kernels_region (omp_context *ctx)
2594 for (;ctx != NULL; ctx = ctx->outer)
2596 gimple *stmt = ctx->stmt;
2597 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2598 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2599 return true;
2602 return false;
2605 /* Check the parallelism clauses inside an OpenACC 'kernels' region.
2606 (This doesn't include OpenACC 'kernels' decomposed parts.)
2607 Until kernels handling moves to use the same loop indirection
2608 scheme as parallel, we need to do this checking early. */
2610 static unsigned
2611 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2613 bool checking = true;
2614 unsigned outer_mask = 0;
2615 unsigned this_mask = 0;
2616 bool has_seq = false, has_auto = false;
2618 if (ctx->outer)
2619 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2620 if (!stmt)
2622 checking = false;
2623 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2624 return outer_mask;
2625 stmt = as_a <gomp_for *> (ctx->stmt);
2628 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2630 switch (OMP_CLAUSE_CODE (c))
2632 case OMP_CLAUSE_GANG:
2633 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2634 break;
2635 case OMP_CLAUSE_WORKER:
2636 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2637 break;
2638 case OMP_CLAUSE_VECTOR:
2639 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2640 break;
2641 case OMP_CLAUSE_SEQ:
2642 has_seq = true;
2643 break;
2644 case OMP_CLAUSE_AUTO:
2645 has_auto = true;
2646 break;
2647 default:
2648 break;
2652 if (checking)
2654 if (has_seq && (this_mask || has_auto))
2655 error_at (gimple_location (stmt), "%<seq%> overrides other"
2656 " OpenACC loop specifiers");
2657 else if (has_auto && this_mask)
2658 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2659 " OpenACC loop specifiers");
2661 if (this_mask & outer_mask)
2662 error_at (gimple_location (stmt), "inner loop uses same"
2663 " OpenACC parallelism as containing loop");
2666 return outer_mask | this_mask;
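/* For illustration, inside an OpenACC kernels region

     #pragma acc loop gang
     for (i = 0; i < n; i++)
       #pragma acc loop gang
       for (j = 0; j < n; j++)
         ...

   the recursion above folds GOMP_DIM_MASK (GOMP_DIM_GANG) into outer_mask by
   the time the inner loop is checked, so the second gang clause is diagnosed
   as reusing the containing loop's parallelism; seq and auto conflicts are
   caught the same way via has_seq/has_auto.  */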
2669 /* Scan a GIMPLE_OMP_FOR. */
2671 static omp_context *
2672 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2674 omp_context *ctx;
2675 size_t i;
2676 tree clauses = gimple_omp_for_clauses (stmt);
2678 ctx = new_omp_context (stmt, outer_ctx);
2680 if (is_gimple_omp_oacc (stmt))
2682 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2684 if (!(tgt && is_oacc_kernels (tgt)))
2685 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2687 tree c_op0;
2688 switch (OMP_CLAUSE_CODE (c))
2690 case OMP_CLAUSE_GANG:
2691 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2692 break;
2694 case OMP_CLAUSE_WORKER:
2695 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2696 break;
2698 case OMP_CLAUSE_VECTOR:
2699 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2700 break;
2702 default:
2703 continue;
2706 if (c_op0)
2708 /* By construction, this is impossible for OpenACC 'kernels'
2709 decomposed parts. */
2710 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2712 error_at (OMP_CLAUSE_LOCATION (c),
2713 "argument not permitted on %qs clause",
2714 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2715 if (tgt)
2716 inform (gimple_location (tgt->stmt),
2717 "enclosing parent compute construct");
2718 else if (oacc_get_fn_attrib (current_function_decl))
2719 inform (DECL_SOURCE_LOCATION (current_function_decl),
2720 "enclosing routine");
2721 else
2722 gcc_unreachable ();
2726 if (tgt && is_oacc_kernels (tgt))
2727 check_oacc_kernel_gwv (stmt, ctx);
2729 /* Collect all variables named in reductions on this loop. Ensure
2730 that, if this loop has a reduction on some variable v, and there is
2731 a reduction on v somewhere in an outer context, then there is a
2732 reduction on v on all intervening loops as well. */
2733 tree local_reduction_clauses = NULL;
2734 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2736 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2737 local_reduction_clauses
2738 = tree_cons (NULL, c, local_reduction_clauses);
2740 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2741 ctx->outer_reduction_clauses
2742 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2743 ctx->outer->outer_reduction_clauses);
2744 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2745 tree local_iter = local_reduction_clauses;
2746 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2748 tree local_clause = TREE_VALUE (local_iter);
2749 tree local_var = OMP_CLAUSE_DECL (local_clause);
2750 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2751 bool have_outer_reduction = false;
2752 tree ctx_iter = outer_reduction_clauses;
2753 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2755 tree outer_clause = TREE_VALUE (ctx_iter);
2756 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2757 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2758 if (outer_var == local_var && outer_op != local_op)
2760 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2761 "conflicting reduction operations for %qE",
2762 local_var);
2763 inform (OMP_CLAUSE_LOCATION (outer_clause),
2764 "location of the previous reduction for %qE",
2765 outer_var);
2767 if (outer_var == local_var)
2769 have_outer_reduction = true;
2770 break;
2773 if (have_outer_reduction)
2775 /* There is a reduction on outer_var both on this loop and on
2776 some enclosing loop. Walk up the context tree until such a
2777 loop with a reduction on outer_var is found, and complain
2778 about all intervening loops that do not have such a
2779 reduction. */
2780 struct omp_context *curr_loop = ctx->outer;
2781 bool found = false;
2782 while (curr_loop != NULL)
2784 tree curr_iter = curr_loop->local_reduction_clauses;
2785 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2787 tree curr_clause = TREE_VALUE (curr_iter);
2788 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2789 if (curr_var == local_var)
2791 found = true;
2792 break;
2795 if (!found)
2796 warning_at (gimple_location (curr_loop->stmt), 0,
2797 "nested loop in reduction needs "
2798 "reduction clause for %qE",
2799 local_var);
2800 else
2801 break;
2802 curr_loop = curr_loop->outer;
2806 ctx->local_reduction_clauses = local_reduction_clauses;
2807 ctx->outer_reduction_clauses
2808 = chainon (unshare_expr (ctx->local_reduction_clauses),
2809 ctx->outer_reduction_clauses);
2811 if (tgt && is_oacc_kernels (tgt))
2813 /* Strip out reductions, as they are not handled yet. */
2814 tree *prev_ptr = &clauses;
2816 while (tree probe = *prev_ptr)
2818 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2820 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2821 *prev_ptr = *next_ptr;
2822 else
2823 prev_ptr = next_ptr;
2826 gimple_omp_for_set_clauses (stmt, clauses);
2830 scan_sharing_clauses (clauses, ctx);
2832 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2833 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2835 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2836 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2837 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2838 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2840 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2841 return ctx;
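/* The reduction bookkeeping above diagnoses, for example,

     #pragma acc loop reduction(+:s)
     for (i = 0; i < n; i++)
       #pragma acc loop
       for (j = 0; j < n; j++)
         #pragma acc loop reduction(+:s)
         for (k = 0; k < n; k++)
           ...

   warning that the middle loop, which sits between two loops reducing s,
   needs its own reduction clause, and separately warning when two nested
   reductions use conflicting operators on the same variable.  */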
2844 /* Duplicate #pragma omp simd, one copy for SIMT, another one for SIMD. */
2846 static void
2847 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2848 omp_context *outer_ctx)
2850 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2851 gsi_replace (gsi, bind, false);
2852 gimple_seq seq = NULL;
2853 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2854 tree cond = create_tmp_var_raw (integer_type_node);
2855 DECL_CONTEXT (cond) = current_function_decl;
2856 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2857 gimple_bind_set_vars (bind, cond);
2858 gimple_call_set_lhs (g, cond);
2859 gimple_seq_add_stmt (&seq, g);
2860 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2861 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2862 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2863 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2864 gimple_seq_add_stmt (&seq, g);
2865 g = gimple_build_label (lab1);
2866 gimple_seq_add_stmt (&seq, g);
2867 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2868 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2869 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2870 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2871 gimple_omp_for_set_clauses (new_stmt, clause);
2872 gimple_seq_add_stmt (&seq, new_stmt);
2873 g = gimple_build_goto (lab3);
2874 gimple_seq_add_stmt (&seq, g);
2875 g = gimple_build_label (lab2);
2876 gimple_seq_add_stmt (&seq, g);
2877 gimple_seq_add_stmt (&seq, stmt);
2878 g = gimple_build_label (lab3);
2879 gimple_seq_add_stmt (&seq, g);
2880 gimple_bind_set_body (bind, seq);
2881 update_stmt (bind);
2882 scan_omp_for (new_stmt, outer_ctx);
2883 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
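/* The bind built above is shaped roughly as

     cond = .GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1:
       #pragma omp simd _simt_ ...   (copy of the loop, _SIMT_ clause added)
       goto lab3;
     lab2:
       #pragma omp simd ...          (the original loop)
     lab3:

   so later passes can retain whichever version suits the target.  */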
2886 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2887 struct walk_stmt_info *);
2888 static omp_context *maybe_lookup_ctx (gimple *);
2890 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2891 for the scan phase loop. */
2893 static void
2894 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2895 omp_context *outer_ctx)
2897 /* The only change between inclusive and exclusive scan will be
2898 within the first simd loop, so just use inclusive in the
2899 worksharing loop. */
2900 outer_ctx->scan_inclusive = true;
2901 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2902 OMP_CLAUSE_DECL (c) = integer_zero_node;
2904 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2905 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2906 gsi_replace (gsi, input_stmt, false);
2907 gimple_seq input_body = NULL;
2908 gimple_seq_add_stmt (&input_body, stmt);
2909 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2911 gimple_stmt_iterator input1_gsi = gsi_none ();
2912 struct walk_stmt_info wi;
2913 memset (&wi, 0, sizeof (wi));
2914 wi.val_only = true;
2915 wi.info = (void *) &input1_gsi;
2916 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2917 gcc_assert (!gsi_end_p (input1_gsi));
2919 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2920 gsi_next (&input1_gsi);
2921 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2922 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2923 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2924 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2925 std::swap (input_stmt1, scan_stmt1);
2927 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2928 gimple_omp_set_body (input_stmt1, NULL);
2930 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2931 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2933 gimple_omp_set_body (input_stmt1, input_body1);
2934 gimple_omp_set_body (scan_stmt1, NULL);
2936 gimple_stmt_iterator input2_gsi = gsi_none ();
2937 memset (&wi, 0, sizeof (wi));
2938 wi.val_only = true;
2939 wi.info = (void *) &input2_gsi;
2940 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2941 NULL, &wi);
2942 gcc_assert (!gsi_end_p (input2_gsi));
2944 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2945 gsi_next (&input2_gsi);
2946 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2947 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2948 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2949 std::swap (input_stmt2, scan_stmt2);
2951 gimple_omp_set_body (input_stmt2, NULL);
2953 gimple_omp_set_body (input_stmt, input_body);
2954 gimple_omp_set_body (scan_stmt, scan_body);
2956 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2957 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2959 ctx = new_omp_context (scan_stmt, outer_ctx);
2960 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2962 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
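/* E.g. for an inclusive scan

     #pragma omp simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
         r += a[i];
         #pragma omp scan inclusive (r)
         b[i] = r;
       }

   the body is split at the GIMPLE_OMP_SCAN located above and the simd loop
   is duplicated: the first copy becomes the input phase and the second,
   whose context is marked for_simd_scan_phase, the scan phase, each wrapped
   in its own GIMPLE_OMP_SCAN region.  */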
2965 /* Scan an OpenMP sections directive. */
2967 static void
2968 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2970 omp_context *ctx;
2972 ctx = new_omp_context (stmt, outer_ctx);
2973 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2974 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2977 /* Scan an OpenMP single directive. */
2979 static void
2980 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2982 omp_context *ctx;
2983 tree name;
2985 ctx = new_omp_context (stmt, outer_ctx);
2986 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2987 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2988 name = create_tmp_var_name (".omp_copy_s");
2989 name = build_decl (gimple_location (stmt),
2990 TYPE_DECL, name, ctx->record_type);
2991 TYPE_NAME (ctx->record_type) = name;
2993 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2994 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2996 if (TYPE_FIELDS (ctx->record_type) == NULL)
2997 ctx->record_type = NULL;
2998 else
2999 layout_type (ctx->record_type);
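/* The .omp_copy_s record only survives when copyprivate clauses added
   fields to it, e.g. "#pragma omp single copyprivate(x)", where it
   broadcasts x from the thread that executed the single region to the
   other threads; otherwise record_type is reset to NULL just above.  */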
3002 /* Scan a GIMPLE_OMP_TARGET. */
3004 static void
3005 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
3007 omp_context *ctx;
3008 tree name;
3009 bool offloaded = is_gimple_omp_offloaded (stmt);
3010 tree clauses = gimple_omp_target_clauses (stmt);
3012 ctx = new_omp_context (stmt, outer_ctx);
3013 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3014 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3015 name = create_tmp_var_name (".omp_data_t");
3016 name = build_decl (gimple_location (stmt),
3017 TYPE_DECL, name, ctx->record_type);
3018 DECL_ARTIFICIAL (name) = 1;
3019 DECL_NAMELESS (name) = 1;
3020 TYPE_NAME (ctx->record_type) = name;
3021 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3023 if (offloaded)
3025 create_omp_child_function (ctx, false);
3026 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
3029 scan_sharing_clauses (clauses, ctx);
3030 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3032 if (TYPE_FIELDS (ctx->record_type) == NULL)
3033 ctx->record_type = ctx->receiver_decl = NULL;
3034 else
3036 TYPE_FIELDS (ctx->record_type)
3037 = nreverse (TYPE_FIELDS (ctx->record_type));
3038 if (flag_checking)
3040 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
3041 for (tree field = TYPE_FIELDS (ctx->record_type);
3042 field;
3043 field = DECL_CHAIN (field))
3044 gcc_assert (DECL_ALIGN (field) == align);
3046 layout_type (ctx->record_type);
3047 if (offloaded)
3048 fixup_child_record_type (ctx);
3051 if (ctx->teams_nested_p && ctx->nonteams_nested_p)
3053 error_at (gimple_location (stmt),
3054 "%<target%> construct with nested %<teams%> construct "
3055 "contains directives outside of the %<teams%> construct");
3056 gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
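/* For an offloaded region such as

     #pragma omp target map(tofrom: a[0:n])

   a child function serves as the offload entry point and .omp_data_t gets
   one field per mapped item; the field list is reversed back into source
   order above, and with checking enabled all fields are expected to share a
   single alignment.  */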
3060 /* Scan an OpenMP teams directive. */
3062 static void
3063 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
3065 omp_context *ctx = new_omp_context (stmt, outer_ctx);
3067 if (!gimple_omp_teams_host (stmt))
3069 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3070 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3071 return;
3073 taskreg_contexts.safe_push (ctx);
3074 gcc_assert (taskreg_nesting_level == 1);
3075 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3076 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3077 tree name = create_tmp_var_name (".omp_data_s");
3078 name = build_decl (gimple_location (stmt),
3079 TYPE_DECL, name, ctx->record_type);
3080 DECL_ARTIFICIAL (name) = 1;
3081 DECL_NAMELESS (name) = 1;
3082 TYPE_NAME (ctx->record_type) = name;
3083 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3084 create_omp_child_function (ctx, false);
3085 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
3087 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3088 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3090 if (TYPE_FIELDS (ctx->record_type) == NULL)
3091 ctx->record_type = ctx->receiver_decl = NULL;
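/* Only a host teams construct (one not nested in an offloaded target) is
   outlined into a child function here, mirroring scan_omp_parallel; for
   device teams only the clauses and body are scanned, since the enclosing
   target region already does the outlining.  */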
3094 /* Check nesting restrictions. */
3095 static bool
3096 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
3098 tree c;
3100 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3101 inside an OpenACC CTX. */
3102 if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3103 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
3104 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3106 else if (!(is_gimple_omp (stmt)
3107 && is_gimple_omp_oacc (stmt)))
3109 if (oacc_get_fn_attrib (cfun->decl) != NULL)
3111 error_at (gimple_location (stmt),
3112 "non-OpenACC construct inside of OpenACC routine");
3113 return false;
3115 else
3116 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
3117 if (is_gimple_omp (octx->stmt)
3118 && is_gimple_omp_oacc (octx->stmt))
3120 error_at (gimple_location (stmt),
3121 "non-OpenACC construct inside of OpenACC region");
3122 return false;
3126 if (ctx != NULL)
3128 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
3129 && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
3131 c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
3132 OMP_CLAUSE_DEVICE);
3133 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3135 error_at (gimple_location (stmt),
3136 "OpenMP constructs are not allowed in target region "
3137 "with %<ancestor%>");
3138 return false;
3141 if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
3142 ctx->teams_nested_p = true;
3143 else
3144 ctx->nonteams_nested_p = true;
3146 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
3147 && ctx->outer
3148 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
3149 ctx = ctx->outer;
3150 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3151 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3152 && !ctx->loop_p)
3154 c = NULL_TREE;
3155 if (ctx->order_concurrent
3156 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
3157 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3158 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3160 error_at (gimple_location (stmt),
3161 "OpenMP constructs other than %<parallel%>, %<loop%>"
3162 " or %<simd%> may not be nested inside a region with"
3163 " the %<order(concurrent)%> clause");
3164 return false;
3166 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
3168 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3169 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3171 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
3172 && (ctx->outer == NULL
3173 || !gimple_omp_for_combined_into_p (ctx->stmt)
3174 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
3175 || (gimple_omp_for_kind (ctx->outer->stmt)
3176 != GF_OMP_FOR_KIND_FOR)
3177 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
3179 error_at (gimple_location (stmt),
3180 "%<ordered simd threads%> must be closely "
3181 "nested inside of %<%s simd%> region",
3182 lang_GNU_Fortran () ? "do" : "for");
3183 return false;
3185 return true;
3188 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3189 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
3190 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
3191 return true;
3192 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
3193 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3194 return true;
3195 error_at (gimple_location (stmt),
3196 "OpenMP constructs other than "
3197 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3198 "not be nested inside %<simd%> region");
3199 return false;
3201 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3203 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
3204 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
3205 && omp_find_clause (gimple_omp_for_clauses (stmt),
3206 OMP_CLAUSE_BIND) == NULL_TREE))
3207 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
3209 error_at (gimple_location (stmt),
3210 "only %<distribute%>, %<parallel%> or %<loop%> "
3211 "regions are allowed to be strictly nested inside "
3212 "%<teams%> region");
3213 return false;
3216 else if (ctx->order_concurrent
3217 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3218 && (gimple_code (stmt) != GIMPLE_OMP_FOR
3219 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3220 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3222 if (ctx->loop_p)
3223 error_at (gimple_location (stmt),
3224 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3225 "%<simd%> may not be nested inside a %<loop%> region");
3226 else
3227 error_at (gimple_location (stmt),
3228 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3229 "%<simd%> may not be nested inside a region with "
3230 "the %<order(concurrent)%> clause");
3231 return false;
3234 switch (gimple_code (stmt))
3236 case GIMPLE_OMP_FOR:
3237 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3238 return true;
3239 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3241 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3243 error_at (gimple_location (stmt),
3244 "%<distribute%> region must be strictly nested "
3245 "inside %<teams%> construct");
3246 return false;
3248 return true;
3250 /* We split taskloop into a task and a nested taskloop in it. */
3251 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3252 return true;
3253 /* For now, hope this will change so that loop bind(parallel) will not
3254 be allowed in lots of contexts. */
3255 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3256 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3257 return true;
3258 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3260 bool ok = false;
3262 if (ctx)
3263 switch (gimple_code (ctx->stmt))
3265 case GIMPLE_OMP_FOR:
3266 ok = (gimple_omp_for_kind (ctx->stmt)
3267 == GF_OMP_FOR_KIND_OACC_LOOP);
3268 break;
3270 case GIMPLE_OMP_TARGET:
3271 switch (gimple_omp_target_kind (ctx->stmt))
3273 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3274 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3275 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3276 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3277 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3278 ok = true;
3279 break;
3281 default:
3282 break;
3285 default:
3286 break;
3288 else if (oacc_get_fn_attrib (current_function_decl))
3289 ok = true;
3290 if (!ok)
3292 error_at (gimple_location (stmt),
3293 "OpenACC loop directive must be associated with"
3294 " an OpenACC compute region");
3295 return false;
3298 /* FALLTHRU */
3299 case GIMPLE_CALL:
3300 if (is_gimple_call (stmt)
3301 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3302 == BUILT_IN_GOMP_CANCEL
3303 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3304 == BUILT_IN_GOMP_CANCELLATION_POINT))
3306 const char *bad = NULL;
3307 const char *kind = NULL;
3308 const char *construct
3309 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3310 == BUILT_IN_GOMP_CANCEL)
3311 ? "cancel"
3312 : "cancellation point";
3313 if (ctx == NULL)
3315 error_at (gimple_location (stmt), "orphaned %qs construct",
3316 construct);
3317 return false;
3319 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3320 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3321 : 0)
3323 case 1:
3324 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3325 bad = "parallel";
3326 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3327 == BUILT_IN_GOMP_CANCEL
3328 && !integer_zerop (gimple_call_arg (stmt, 1)))
3329 ctx->cancellable = true;
3330 kind = "parallel";
3331 break;
3332 case 2:
3333 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3334 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3335 bad = "for";
3336 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3337 == BUILT_IN_GOMP_CANCEL
3338 && !integer_zerop (gimple_call_arg (stmt, 1)))
3340 ctx->cancellable = true;
3341 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3342 OMP_CLAUSE_NOWAIT))
3343 warning_at (gimple_location (stmt), 0,
3344 "%<cancel for%> inside "
3345 "%<nowait%> for construct");
3346 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3347 OMP_CLAUSE_ORDERED))
3348 warning_at (gimple_location (stmt), 0,
3349 "%<cancel for%> inside "
3350 "%<ordered%> for construct");
3352 kind = "for";
3353 break;
3354 case 4:
3355 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3356 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3357 bad = "sections";
3358 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3359 == BUILT_IN_GOMP_CANCEL
3360 && !integer_zerop (gimple_call_arg (stmt, 1)))
3362 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3364 ctx->cancellable = true;
3365 if (omp_find_clause (gimple_omp_sections_clauses
3366 (ctx->stmt),
3367 OMP_CLAUSE_NOWAIT))
3368 warning_at (gimple_location (stmt), 0,
3369 "%<cancel sections%> inside "
3370 "%<nowait%> sections construct");
3372 else
3374 gcc_assert (ctx->outer
3375 && gimple_code (ctx->outer->stmt)
3376 == GIMPLE_OMP_SECTIONS);
3377 ctx->outer->cancellable = true;
3378 if (omp_find_clause (gimple_omp_sections_clauses
3379 (ctx->outer->stmt),
3380 OMP_CLAUSE_NOWAIT))
3381 warning_at (gimple_location (stmt), 0,
3382 "%<cancel sections%> inside "
3383 "%<nowait%> sections construct");
3386 kind = "sections";
3387 break;
3388 case 8:
3389 if (!is_task_ctx (ctx)
3390 && (!is_taskloop_ctx (ctx)
3391 || ctx->outer == NULL
3392 || !is_task_ctx (ctx->outer)))
3393 bad = "task";
3394 else
3396 for (omp_context *octx = ctx->outer;
3397 octx; octx = octx->outer)
3399 switch (gimple_code (octx->stmt))
3401 case GIMPLE_OMP_TASKGROUP:
3402 break;
3403 case GIMPLE_OMP_TARGET:
3404 if (gimple_omp_target_kind (octx->stmt)
3405 != GF_OMP_TARGET_KIND_REGION)
3406 continue;
3407 /* FALLTHRU */
3408 case GIMPLE_OMP_PARALLEL:
3409 case GIMPLE_OMP_TEAMS:
3410 error_at (gimple_location (stmt),
3411 "%<%s taskgroup%> construct not closely "
3412 "nested inside of %<taskgroup%> region",
3413 construct);
3414 return false;
3415 case GIMPLE_OMP_TASK:
3416 if (gimple_omp_task_taskloop_p (octx->stmt)
3417 && octx->outer
3418 && is_taskloop_ctx (octx->outer))
3420 tree clauses
3421 = gimple_omp_for_clauses (octx->outer->stmt);
3422 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3423 break;
3425 continue;
3426 default:
3427 continue;
3429 break;
3431 ctx->cancellable = true;
3433 kind = "taskgroup";
3434 break;
3435 default:
3436 error_at (gimple_location (stmt), "invalid arguments");
3437 return false;
3439 if (bad)
3441 error_at (gimple_location (stmt),
3442 "%<%s %s%> construct not closely nested inside of %qs",
3443 construct, kind, bad);
3444 return false;
3447 /* FALLTHRU */
3448 case GIMPLE_OMP_SECTIONS:
3449 case GIMPLE_OMP_SINGLE:
3450 for (; ctx != NULL; ctx = ctx->outer)
3451 switch (gimple_code (ctx->stmt))
3453 case GIMPLE_OMP_FOR:
3454 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3455 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3456 break;
3457 /* FALLTHRU */
3458 case GIMPLE_OMP_SECTIONS:
3459 case GIMPLE_OMP_SINGLE:
3460 case GIMPLE_OMP_ORDERED:
3461 case GIMPLE_OMP_MASTER:
3462 case GIMPLE_OMP_MASKED:
3463 case GIMPLE_OMP_TASK:
3464 case GIMPLE_OMP_CRITICAL:
3465 if (is_gimple_call (stmt))
3467 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3468 != BUILT_IN_GOMP_BARRIER)
3469 return true;
3470 error_at (gimple_location (stmt),
3471 "barrier region may not be closely nested inside "
3472 "of work-sharing, %<loop%>, %<critical%>, "
3473 "%<ordered%>, %<master%>, %<masked%>, explicit "
3474 "%<task%> or %<taskloop%> region");
3475 return false;
3477 error_at (gimple_location (stmt),
3478 "work-sharing region may not be closely nested inside "
3479 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3480 "%<master%>, %<masked%>, explicit %<task%> or "
3481 "%<taskloop%> region");
3482 return false;
3483 case GIMPLE_OMP_PARALLEL:
3484 case GIMPLE_OMP_TEAMS:
3485 return true;
3486 case GIMPLE_OMP_TARGET:
3487 if (gimple_omp_target_kind (ctx->stmt)
3488 == GF_OMP_TARGET_KIND_REGION)
3489 return true;
3490 break;
3491 default:
3492 break;
3494 break;
3495 case GIMPLE_OMP_MASTER:
3496 case GIMPLE_OMP_MASKED:
3497 for (; ctx != NULL; ctx = ctx->outer)
3498 switch (gimple_code (ctx->stmt))
3500 case GIMPLE_OMP_FOR:
3501 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3502 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3503 break;
3504 /* FALLTHRU */
3505 case GIMPLE_OMP_SECTIONS:
3506 case GIMPLE_OMP_SINGLE:
3507 case GIMPLE_OMP_TASK:
3508 error_at (gimple_location (stmt),
3509 "%qs region may not be closely nested inside "
3510 "of work-sharing, %<loop%>, explicit %<task%> or "
3511 "%<taskloop%> region",
3512 gimple_code (stmt) == GIMPLE_OMP_MASTER
3513 ? "master" : "masked");
3514 return false;
3515 case GIMPLE_OMP_PARALLEL:
3516 case GIMPLE_OMP_TEAMS:
3517 return true;
3518 case GIMPLE_OMP_TARGET:
3519 if (gimple_omp_target_kind (ctx->stmt)
3520 == GF_OMP_TARGET_KIND_REGION)
3521 return true;
3522 break;
3523 default:
3524 break;
3526 break;
3527 case GIMPLE_OMP_SCOPE:
3528 for (; ctx != NULL; ctx = ctx->outer)
3529 switch (gimple_code (ctx->stmt))
3531 case GIMPLE_OMP_FOR:
3532 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3533 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3534 break;
3535 /* FALLTHRU */
3536 case GIMPLE_OMP_SECTIONS:
3537 case GIMPLE_OMP_SINGLE:
3538 case GIMPLE_OMP_TASK:
3539 case GIMPLE_OMP_CRITICAL:
3540 case GIMPLE_OMP_ORDERED:
3541 case GIMPLE_OMP_MASTER:
3542 case GIMPLE_OMP_MASKED:
3543 error_at (gimple_location (stmt),
3544 "%<scope%> region may not be closely nested inside "
3545 "of work-sharing, %<loop%>, explicit %<task%>, "
3546 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3547 "or %<masked%> region");
3548 return false;
3549 case GIMPLE_OMP_PARALLEL:
3550 case GIMPLE_OMP_TEAMS:
3551 return true;
3552 case GIMPLE_OMP_TARGET:
3553 if (gimple_omp_target_kind (ctx->stmt)
3554 == GF_OMP_TARGET_KIND_REGION)
3555 return true;
3556 break;
3557 default:
3558 break;
3560 break;
3561 case GIMPLE_OMP_TASK:
3562 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3563 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3564 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3565 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3567 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3568 error_at (OMP_CLAUSE_LOCATION (c),
3569 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3570 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3571 return false;
3573 break;
3574 case GIMPLE_OMP_ORDERED:
3575 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3576 c; c = OMP_CLAUSE_CHAIN (c))
3578 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3580 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3581 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3582 continue;
3584 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3585 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3586 || kind == OMP_CLAUSE_DEPEND_SINK)
3588 tree oclause;
3589 /* Look for a containing ordered(N) loop. */
3590 if (ctx == NULL
3591 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3592 || (oclause
3593 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3594 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3596 error_at (OMP_CLAUSE_LOCATION (c),
3597 "%<ordered%> construct with %<depend%> clause "
3598 "must be closely nested inside an %<ordered%> "
3599 "loop");
3600 return false;
3602 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3604 error_at (OMP_CLAUSE_LOCATION (c),
3605 "%<ordered%> construct with %<depend%> clause "
3606 "must be closely nested inside a loop with "
3607 "%<ordered%> clause with a parameter");
3608 return false;
3611 else
3613 error_at (OMP_CLAUSE_LOCATION (c),
3614 "invalid depend kind in omp %<ordered%> %<depend%>");
3615 return false;
3618 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3619 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3621 /* An ordered simd must be closely nested inside of a simd region,
3622 and a simd region must not encounter constructs other than
3623 ordered simd, therefore an ordered simd may be either orphaned,
3624 or ctx->stmt must be simd.  The latter case has already been
3625 handled earlier. */
3626 if (ctx != NULL)
3628 error_at (gimple_location (stmt),
3629 "%<ordered%> %<simd%> must be closely nested inside "
3630 "%<simd%> region");
3631 return false;
3634 for (; ctx != NULL; ctx = ctx->outer)
3635 switch (gimple_code (ctx->stmt))
3637 case GIMPLE_OMP_CRITICAL:
3638 case GIMPLE_OMP_TASK:
3639 case GIMPLE_OMP_ORDERED:
3640 ordered_in_taskloop:
3641 error_at (gimple_location (stmt),
3642 "%<ordered%> region may not be closely nested inside "
3643 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3644 "%<taskloop%> region");
3645 return false;
3646 case GIMPLE_OMP_FOR:
3647 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3648 goto ordered_in_taskloop;
3649 tree o;
3650 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3651 OMP_CLAUSE_ORDERED);
3652 if (o == NULL)
3654 error_at (gimple_location (stmt),
3655 "%<ordered%> region must be closely nested inside "
3656 "a loop region with an %<ordered%> clause");
3657 return false;
3659 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3660 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3662 error_at (gimple_location (stmt),
3663 "%<ordered%> region without %<depend%> clause may "
3664 "not be closely nested inside a loop region with "
3665 "an %<ordered%> clause with a parameter");
3666 return false;
3668 return true;
3669 case GIMPLE_OMP_TARGET:
3670 if (gimple_omp_target_kind (ctx->stmt)
3671 != GF_OMP_TARGET_KIND_REGION)
3672 break;
3673 /* FALLTHRU */
3674 case GIMPLE_OMP_PARALLEL:
3675 case GIMPLE_OMP_TEAMS:
3676 error_at (gimple_location (stmt),
3677 "%<ordered%> region must be closely nested inside "
3678 "a loop region with an %<ordered%> clause");
3679 return false;
3680 default:
3681 break;
3683 break;
3684 case GIMPLE_OMP_CRITICAL:
3686 tree this_stmt_name
3687 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3688 for (; ctx != NULL; ctx = ctx->outer)
3689 if (gomp_critical *other_crit
3690 = dyn_cast <gomp_critical *> (ctx->stmt))
3691 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3693 error_at (gimple_location (stmt),
3694 "%<critical%> region may not be nested inside "
3695 "a %<critical%> region with the same name");
3696 return false;
3699 break;
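      /* Illustrative sketch (not part of the original file): the walk above
	 diagnoses same-name nesting such as

	   #pragma omp critical (xyzzy)
	   {
	     #pragma omp critical (xyzzy)	/* error: same name */
	     ;
	   }

	 while critical regions with different names may nest.  */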
3700 case GIMPLE_OMP_TEAMS:
3701 if (ctx == NULL)
3702 break;
3703 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3704 || (gimple_omp_target_kind (ctx->stmt)
3705 != GF_OMP_TARGET_KIND_REGION))
3707 /* Teams construct can appear either strictly nested inside of
3708 target construct with no intervening stmts, or can be encountered
3709 only by initial task (so must not appear inside any OpenMP
3710 construct). */
3711 error_at (gimple_location (stmt),
3712 "%<teams%> construct must be closely nested inside of "
3713 "%<target%> construct or not nested in any OpenMP "
3714 "construct");
3715 return false;
3717 break;
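      /* Illustrative sketch (not part of the original file) of the check
	 above:

	   #pragma omp target
	   #pragma omp teams	/* OK: strictly nested in target */
	   ;

	   #pragma omp parallel
	   #pragma omp teams	/* error: nested in another OpenMP construct */
	   ;
      */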
3718 case GIMPLE_OMP_TARGET:
3719 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3720 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3721 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3722 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3724 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3725 error_at (OMP_CLAUSE_LOCATION (c),
3726 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3727 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3728 return false;
3730 if (is_gimple_omp_offloaded (stmt)
3731 && oacc_get_fn_attrib (cfun->decl) != NULL)
3733 error_at (gimple_location (stmt),
3734 "OpenACC region inside of OpenACC routine, nested "
3735 "parallelism not supported yet");
3736 return false;
3738 for (; ctx != NULL; ctx = ctx->outer)
3740 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3742 if (is_gimple_omp (stmt)
3743 && is_gimple_omp_oacc (stmt)
3744 && is_gimple_omp (ctx->stmt))
3746 error_at (gimple_location (stmt),
3747 "OpenACC construct inside of non-OpenACC region");
3748 return false;
3750 continue;
3753 const char *stmt_name, *ctx_stmt_name;
3754 switch (gimple_omp_target_kind (stmt))
3756 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3757 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3758 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3759 case GF_OMP_TARGET_KIND_ENTER_DATA:
3760 stmt_name = "target enter data"; break;
3761 case GF_OMP_TARGET_KIND_EXIT_DATA:
3762 stmt_name = "target exit data"; break;
3763 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3764 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3765 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3766 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3767 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3768 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
3769 stmt_name = "enter data"; break;
3770 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
3771 stmt_name = "exit data"; break;
3772 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3773 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3774 break;
3775 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3776 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3777 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3778 /* OpenACC 'kernels' decomposed parts. */
3779 stmt_name = "kernels"; break;
3780 default: gcc_unreachable ();
3782 switch (gimple_omp_target_kind (ctx->stmt))
3784 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3785 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3786 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3787 ctx_stmt_name = "parallel"; break;
3788 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3789 ctx_stmt_name = "kernels"; break;
3790 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3791 ctx_stmt_name = "serial"; break;
3792 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3793 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3794 ctx_stmt_name = "host_data"; break;
3795 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3796 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3797 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3798 /* OpenACC 'kernels' decomposed parts. */
3799 ctx_stmt_name = "kernels"; break;
3800 default: gcc_unreachable ();
3803 /* OpenACC/OpenMP mismatch? */
3804 if (is_gimple_omp_oacc (stmt)
3805 != is_gimple_omp_oacc (ctx->stmt))
3807 error_at (gimple_location (stmt),
3808 "%s %qs construct inside of %s %qs region",
3809 (is_gimple_omp_oacc (stmt)
3810 ? "OpenACC" : "OpenMP"), stmt_name,
3811 (is_gimple_omp_oacc (ctx->stmt)
3812 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3813 return false;
3815 if (is_gimple_omp_offloaded (ctx->stmt))
3817 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3818 if (is_gimple_omp_oacc (ctx->stmt))
3820 error_at (gimple_location (stmt),
3821 "%qs construct inside of %qs region",
3822 stmt_name, ctx_stmt_name);
3823 return false;
3825 else
3827 warning_at (gimple_location (stmt), 0,
3828 "%qs construct inside of %qs region",
3829 stmt_name, ctx_stmt_name);
3833 break;
3834 default:
3835 break;
3837 return true;
3841 /* Helper function for scan_omp.
3843 Callback for walk_tree or operators in walk_gimple_stmt used to
3844 scan for OMP directives in TP. */
3846 static tree
3847 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3849 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3850 omp_context *ctx = (omp_context *) wi->info;
3851 tree t = *tp;
3853 switch (TREE_CODE (t))
3855 case VAR_DECL:
3856 case PARM_DECL:
3857 case LABEL_DECL:
3858 case RESULT_DECL:
3859 if (ctx)
3861 tree repl = remap_decl (t, &ctx->cb);
3862 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3863 *tp = repl;
3865 break;
3867 default:
3868 if (ctx && TYPE_P (t))
3869 *tp = remap_type (t, &ctx->cb);
3870 else if (!DECL_P (t))
3872 *walk_subtrees = 1;
3873 if (ctx)
3875 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3876 if (tem != TREE_TYPE (t))
3878 if (TREE_CODE (t) == INTEGER_CST)
3879 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3880 else
3881 TREE_TYPE (t) = tem;
3885 break;
3888 return NULL_TREE;
3891 /* Return true if FNDECL is a setjmp or a longjmp. */
3893 static bool
3894 setjmp_or_longjmp_p (const_tree fndecl)
3896 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3897 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3898 return true;
3900 tree declname = DECL_NAME (fndecl);
3901 if (!declname
3902 || (DECL_CONTEXT (fndecl) != NULL_TREE
3903 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3904 || !TREE_PUBLIC (fndecl))
3905 return false;
3907 const char *name = IDENTIFIER_POINTER (declname);
3908 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3911 /* Return true if FNDECL is an omp_* runtime API call. */
3913 static bool
3914 omp_runtime_api_call (const_tree fndecl)
3916 tree declname = DECL_NAME (fndecl);
3917 if (!declname
3918 || (DECL_CONTEXT (fndecl) != NULL_TREE
3919 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3920 || !TREE_PUBLIC (fndecl))
3921 return false;
3923 const char *name = IDENTIFIER_POINTER (declname);
3924 if (!startswith (name, "omp_"))
3925 return false;
3927 static const char *omp_runtime_apis[] =
3929 /* This array has 3 sections. First, omp_* calls that don't
3930 have any suffixes. */
3931 "aligned_alloc",
3932 "aligned_calloc",
3933 "alloc",
3934 "calloc",
3935 "free",
3936 "realloc",
3937 "target_alloc",
3938 "target_associate_ptr",
3939 "target_disassociate_ptr",
3940 "target_free",
3941 "target_is_present",
3942 "target_memcpy",
3943 "target_memcpy_rect",
3944 NULL,
3945 /* Now omp_* calls that are available as omp_* and omp_*_; however, the
3946 DECL_NAME is always omp_* without trailing underscore. */
3947 "capture_affinity",
3948 "destroy_allocator",
3949 "destroy_lock",
3950 "destroy_nest_lock",
3951 "display_affinity",
3952 "fulfill_event",
3953 "get_active_level",
3954 "get_affinity_format",
3955 "get_cancellation",
3956 "get_default_allocator",
3957 "get_default_device",
3958 "get_device_num",
3959 "get_dynamic",
3960 "get_initial_device",
3961 "get_level",
3962 "get_max_active_levels",
3963 "get_max_task_priority",
3964 "get_max_teams",
3965 "get_max_threads",
3966 "get_nested",
3967 "get_num_devices",
3968 "get_num_places",
3969 "get_num_procs",
3970 "get_num_teams",
3971 "get_num_threads",
3972 "get_partition_num_places",
3973 "get_place_num",
3974 "get_proc_bind",
3975 "get_supported_active_levels",
3976 "get_team_num",
3977 "get_teams_thread_limit",
3978 "get_thread_limit",
3979 "get_thread_num",
3980 "get_wtick",
3981 "get_wtime",
3982 "in_final",
3983 "in_parallel",
3984 "init_lock",
3985 "init_nest_lock",
3986 "is_initial_device",
3987 "pause_resource",
3988 "pause_resource_all",
3989 "set_affinity_format",
3990 "set_default_allocator",
3991 "set_lock",
3992 "set_nest_lock",
3993 "test_lock",
3994 "test_nest_lock",
3995 "unset_lock",
3996 "unset_nest_lock",
3997 NULL,
3998 /* And finally calls available as omp_*, omp_*_ and omp_*_8_; however,
3999 as DECL_NAME only omp_* and omp_*_8 appear. */
4000 "display_env",
4001 "get_ancestor_thread_num",
4002 "init_allocator",
4003 "get_partition_place_nums",
4004 "get_place_num_procs",
4005 "get_place_proc_ids",
4006 "get_schedule",
4007 "get_team_size",
4008 "set_default_device",
4009 "set_dynamic",
4010 "set_max_active_levels",
4011 "set_nested",
4012 "set_num_teams",
4013 "set_num_threads",
4014 "set_schedule",
4015 "set_teams_thread_limit"
4018 int mode = 0;
4019 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
4021 if (omp_runtime_apis[i] == NULL)
4023 mode++;
4024 continue;
4026 size_t len = strlen (omp_runtime_apis[i]);
4027 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
4028 && (name[4 + len] == '\0'
4029 || (mode > 1 && strcmp (name + 4 + len, "_8") == 0)))
4030 return true;
4032 return false;
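/* A few illustrative lookups against the table above (not part of the
   original file): "omp_get_wtime" matches in the second section with no
   suffix; "omp_set_schedule" and "omp_set_schedule_8" both match in the
   third section, where the "_8" suffix is accepted (mode > 1); and
   "omp_set_lock_8" does not match, because the second section accepts
   the plain name only.  */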
4035 /* Helper function for scan_omp.
4037 Callback for walk_gimple_stmt used to scan for OMP directives in
4038 the current statement in GSI. */
4040 static tree
4041 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
4042 struct walk_stmt_info *wi)
4044 gimple *stmt = gsi_stmt (*gsi);
4045 omp_context *ctx = (omp_context *) wi->info;
4047 if (gimple_has_location (stmt))
4048 input_location = gimple_location (stmt);
4050 /* Check the nesting restrictions. */
4051 bool remove = false;
4052 if (is_gimple_omp (stmt))
4053 remove = !check_omp_nesting_restrictions (stmt, ctx);
4054 else if (is_gimple_call (stmt))
4056 tree fndecl = gimple_call_fndecl (stmt);
4057 if (fndecl)
4059 if (ctx
4060 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4061 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
4062 && setjmp_or_longjmp_p (fndecl)
4063 && !ctx->loop_p)
4065 remove = true;
4066 error_at (gimple_location (stmt),
4067 "setjmp/longjmp inside %<simd%> construct");
4069 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4070 switch (DECL_FUNCTION_CODE (fndecl))
4072 case BUILT_IN_GOMP_BARRIER:
4073 case BUILT_IN_GOMP_CANCEL:
4074 case BUILT_IN_GOMP_CANCELLATION_POINT:
4075 case BUILT_IN_GOMP_TASKYIELD:
4076 case BUILT_IN_GOMP_TASKWAIT:
4077 case BUILT_IN_GOMP_TASKGROUP_START:
4078 case BUILT_IN_GOMP_TASKGROUP_END:
4079 remove = !check_omp_nesting_restrictions (stmt, ctx);
4080 break;
4081 default:
4082 break;
4084 else if (ctx)
4086 omp_context *octx = ctx;
4087 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
4088 octx = ctx->outer;
4089 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
4091 remove = true;
4092 error_at (gimple_location (stmt),
4093 "OpenMP runtime API call %qD in a region with "
4094 "%<order(concurrent)%> clause", fndecl);
4096 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4097 && omp_runtime_api_call (fndecl)
4098 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4099 != strlen ("omp_get_num_teams"))
4100 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4101 "omp_get_num_teams") != 0)
4102 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4103 != strlen ("omp_get_team_num"))
4104 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4105 "omp_get_team_num") != 0))
4107 remove = true;
4108 error_at (gimple_location (stmt),
4109 "OpenMP runtime API call %qD strictly nested in a "
4110 "%<teams%> region", fndecl);
4112 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
4113 && (gimple_omp_target_kind (ctx->stmt)
4114 == GF_OMP_TARGET_KIND_REGION)
4115 && omp_runtime_api_call (fndecl))
4117 tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
4118 tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
4119 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
4120 error_at (gimple_location (stmt),
4121 "OpenMP runtime API call %qD in a region with "
4122 "%<device(ancestor)%> clause", fndecl);
4127 if (remove)
4129 stmt = gimple_build_nop ();
4130 gsi_replace (gsi, stmt, false);
4133 *handled_ops_p = true;
4135 switch (gimple_code (stmt))
4137 case GIMPLE_OMP_PARALLEL:
4138 taskreg_nesting_level++;
4139 scan_omp_parallel (gsi, ctx);
4140 taskreg_nesting_level--;
4141 break;
4143 case GIMPLE_OMP_TASK:
4144 taskreg_nesting_level++;
4145 scan_omp_task (gsi, ctx);
4146 taskreg_nesting_level--;
4147 break;
4149 case GIMPLE_OMP_FOR:
4150 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4151 == GF_OMP_FOR_KIND_SIMD)
4152 && gimple_omp_for_combined_into_p (stmt)
4153 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
4155 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
4156 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
4157 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
4159 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
4160 break;
4163 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4164 == GF_OMP_FOR_KIND_SIMD)
4165 && omp_maybe_offloaded_ctx (ctx)
4166 && omp_max_simt_vf ()
4167 && gimple_omp_for_collapse (stmt) == 1)
4168 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
4169 else
4170 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
4171 break;
4173 case GIMPLE_OMP_SCOPE:
4174 ctx = new_omp_context (stmt, ctx);
4175 scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
4176 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4177 break;
4179 case GIMPLE_OMP_SECTIONS:
4180 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
4181 break;
4183 case GIMPLE_OMP_SINGLE:
4184 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
4185 break;
4187 case GIMPLE_OMP_SCAN:
4188 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
4190 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
4191 ctx->scan_inclusive = true;
4192 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
4193 ctx->scan_exclusive = true;
4195 /* FALLTHRU */
4196 case GIMPLE_OMP_SECTION:
4197 case GIMPLE_OMP_MASTER:
4198 case GIMPLE_OMP_ORDERED:
4199 case GIMPLE_OMP_CRITICAL:
4200 ctx = new_omp_context (stmt, ctx);
4201 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4202 break;
4204 case GIMPLE_OMP_MASKED:
4205 ctx = new_omp_context (stmt, ctx);
4206 scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
4207 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4208 break;
4210 case GIMPLE_OMP_TASKGROUP:
4211 ctx = new_omp_context (stmt, ctx);
4212 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
4213 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4214 break;
4216 case GIMPLE_OMP_TARGET:
4217 if (is_gimple_omp_offloaded (stmt))
4219 taskreg_nesting_level++;
4220 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4221 taskreg_nesting_level--;
4223 else
4224 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4225 break;
4227 case GIMPLE_OMP_TEAMS:
4228 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4230 taskreg_nesting_level++;
4231 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4232 taskreg_nesting_level--;
4234 else
4235 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4236 break;
4238 case GIMPLE_BIND:
4240 tree var;
4242 *handled_ops_p = false;
4243 if (ctx)
4244 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4245 var ;
4246 var = DECL_CHAIN (var))
4247 insert_decl_map (&ctx->cb, var, var);
4249 break;
4250 default:
4251 *handled_ops_p = false;
4252 break;
4255 return NULL_TREE;
4259 /* Scan all the statements starting at the current statement. CTX
4260 contains context information about the OMP directives and
4261 clauses found during the scan. */
4263 static void
4264 scan_omp (gimple_seq *body_p, omp_context *ctx)
4266 location_t saved_location;
4267 struct walk_stmt_info wi;
4269 memset (&wi, 0, sizeof (wi));
4270 wi.info = ctx;
4271 wi.want_locations = true;
4273 saved_location = input_location;
4274 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4275 input_location = saved_location;
4278 /* Re-gimplification and code generation routines. */
4280 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4281 of BIND if in a method. */
4283 static void
4284 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4286 if (DECL_ARGUMENTS (current_function_decl)
4287 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4288 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4289 == POINTER_TYPE))
4291 tree vars = gimple_bind_vars (bind);
4292 for (tree *pvar = &vars; *pvar; )
4293 if (omp_member_access_dummy_var (*pvar))
4294 *pvar = DECL_CHAIN (*pvar);
4295 else
4296 pvar = &DECL_CHAIN (*pvar);
4297 gimple_bind_set_vars (bind, vars);
4301 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4302 block and its subblocks. */
4304 static void
4305 remove_member_access_dummy_vars (tree block)
4307 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4308 if (omp_member_access_dummy_var (*pvar))
4309 *pvar = DECL_CHAIN (*pvar);
4310 else
4311 pvar = &DECL_CHAIN (*pvar);
4313 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4314 remove_member_access_dummy_vars (block);
4317 /* If a context was created for STMT when it was scanned, return it. */
4319 static omp_context *
4320 maybe_lookup_ctx (gimple *stmt)
4322 splay_tree_node n;
4323 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4324 return n ? (omp_context *) n->value : NULL;
4328 /* Find the mapping for DECL in CTX or the immediately enclosing
4329 context that has a mapping for DECL.
4331 If CTX is a nested parallel directive, we may have to use the decl
4332 mappings created in CTX's parent context. Suppose that we have the
4333 following parallel nesting (variable UIDs shown for clarity):
4335 iD.1562 = 0;
4336 #omp parallel shared(iD.1562) -> outer parallel
4337 iD.1562 = iD.1562 + 1;
4339 #omp parallel shared (iD.1562) -> inner parallel
4340 iD.1562 = iD.1562 - 1;
4342 Each parallel structure will create a distinct .omp_data_s structure
4343 for copying iD.1562 in/out of the directive:
4345 outer parallel .omp_data_s.1.i -> iD.1562
4346 inner parallel .omp_data_s.2.i -> iD.1562
4348 A shared variable mapping will produce a copy-out operation before
4349 the parallel directive and a copy-in operation after it. So, in
4350 this case we would have:
4352 iD.1562 = 0;
4353 .omp_data_o.1.i = iD.1562;
4354 #omp parallel shared(iD.1562) -> outer parallel
4355 .omp_data_i.1 = &.omp_data_o.1
4356 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4358 .omp_data_o.2.i = iD.1562; -> **
4359 #omp parallel shared(iD.1562) -> inner parallel
4360 .omp_data_i.2 = &.omp_data_o.2
4361 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4364 ** This is a problem. The symbol iD.1562 cannot be referenced
4365 inside the body of the outer parallel region. But since we are
4366 emitting this copy operation while expanding the inner parallel
4367 directive, we need to access the CTX structure of the outer
4368 parallel directive to get the correct mapping:
4370 .omp_data_o.2.i = .omp_data_i.1->i
4372 Since there may be other workshare or parallel directives enclosing
4373 the parallel directive, it may be necessary to walk up the context
4374 parent chain. This is not a problem in general because nested
4375 parallelism happens only rarely. */
4377 static tree
4378 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4380 tree t;
4381 omp_context *up;
4383 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4384 t = maybe_lookup_decl (decl, up);
4386 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4388 return t ? t : decl;
4392 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4393 in outer contexts. */
4395 static tree
4396 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4398 tree t = NULL;
4399 omp_context *up;
4401 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4402 t = maybe_lookup_decl (decl, up);
4404 return t ? t : decl;
4408 /* Construct the initialization value for reduction operation OP. */
4410 tree
4411 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4413 switch (op)
4415 case PLUS_EXPR:
4416 case MINUS_EXPR:
4417 case BIT_IOR_EXPR:
4418 case BIT_XOR_EXPR:
4419 case TRUTH_OR_EXPR:
4420 case TRUTH_ORIF_EXPR:
4421 case TRUTH_XOR_EXPR:
4422 case NE_EXPR:
4423 return build_zero_cst (type);
4425 case MULT_EXPR:
4426 case TRUTH_AND_EXPR:
4427 case TRUTH_ANDIF_EXPR:
4428 case EQ_EXPR:
4429 return fold_convert_loc (loc, type, integer_one_node);
4431 case BIT_AND_EXPR:
4432 return fold_convert_loc (loc, type, integer_minus_one_node);
4434 case MAX_EXPR:
4435 if (SCALAR_FLOAT_TYPE_P (type))
4437 REAL_VALUE_TYPE max, min;
4438 if (HONOR_INFINITIES (type))
4440 real_inf (&max);
4441 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4443 else
4444 real_maxval (&min, 1, TYPE_MODE (type));
4445 return build_real (type, min);
4447 else if (POINTER_TYPE_P (type))
4449 wide_int min
4450 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4451 return wide_int_to_tree (type, min);
4453 else
4455 gcc_assert (INTEGRAL_TYPE_P (type));
4456 return TYPE_MIN_VALUE (type);
4459 case MIN_EXPR:
4460 if (SCALAR_FLOAT_TYPE_P (type))
4462 REAL_VALUE_TYPE max;
4463 if (HONOR_INFINITIES (type))
4464 real_inf (&max);
4465 else
4466 real_maxval (&max, 0, TYPE_MODE (type));
4467 return build_real (type, max);
4469 else if (POINTER_TYPE_P (type))
4471 wide_int max
4472 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4473 return wide_int_to_tree (type, max);
4475 else
4477 gcc_assert (INTEGRAL_TYPE_P (type));
4478 return TYPE_MAX_VALUE (type);
4481 default:
4482 gcc_unreachable ();
4486 /* Construct the initialization value for reduction CLAUSE. */
4488 tree
4489 omp_reduction_init (tree clause, tree type)
4491 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4492 OMP_CLAUSE_REDUCTION_CODE (clause), type);
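/* For illustration (not part of the original file), the identity values
   produced above: omp_reduction_init_op returns 0 for +, -, |, ^ and the
   truth-or operations; 1 for *, the truth-and operations and ==; ~0 for &;
   for max the minimum of the type (or -inf when infinities are honored),
   and for min the corresponding maximum.  */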
4495 /* Return the alignment to be assumed for the variable in CLAUSE, which
4496 should be OMP_CLAUSE_ALIGNED. */
4498 static tree
4499 omp_clause_aligned_alignment (tree clause)
4501 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4502 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4504 /* Otherwise return the implementation-defined alignment. */
4505 unsigned int al = 1;
4506 opt_scalar_mode mode_iter;
4507 auto_vector_modes modes;
4508 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4509 static enum mode_class classes[]
4510 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4511 for (int i = 0; i < 4; i += 2)
4512 /* The for loop above dictates that we only walk through scalar classes. */
4513 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4515 scalar_mode mode = mode_iter.require ();
4516 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4517 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4518 continue;
4519 machine_mode alt_vmode;
4520 for (unsigned int j = 0; j < modes.length (); ++j)
4521 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4522 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4523 vmode = alt_vmode;
4525 tree type = lang_hooks.types.type_for_mode (mode, 1);
4526 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4527 continue;
4528 type = build_vector_type_for_mode (type, vmode);
4529 if (TYPE_MODE (type) != vmode)
4530 continue;
4531 if (TYPE_ALIGN_UNIT (type) > al)
4532 al = TYPE_ALIGN_UNIT (type);
4534 return build_int_cst (integer_type_node, al);
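/* As a hedged, target-dependent example (not part of the original file):
   on x86_64 the loop above typically yields 16 with plain SSE2 and 64 when
   512-bit vector modes such as AVX-512 are enabled, since the widest
   preferred vector type then has a 64-byte alignment.  */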
4538 /* This structure is part of the interface between lower_rec_simd_input_clauses
4539 and lower_rec_input_clauses. */
4541 class omplow_simd_context {
4542 public:
4543 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4544 tree idx;
4545 tree lane;
4546 tree lastlane;
4547 vec<tree, va_heap> simt_eargs;
4548 gimple_seq simt_dlist;
4549 poly_uint64_pod max_vf;
4550 bool is_simt;
4553 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4554 privatization. */
4556 static bool
4557 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4558 omplow_simd_context *sctx, tree &ivar,
4559 tree &lvar, tree *rvar = NULL,
4560 tree *rvar2 = NULL)
4562 if (known_eq (sctx->max_vf, 0U))
4564 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4565 if (maybe_gt (sctx->max_vf, 1U))
4567 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4568 OMP_CLAUSE_SAFELEN);
4569 if (c)
4571 poly_uint64 safe_len;
4572 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4573 || maybe_lt (safe_len, 1U))
4574 sctx->max_vf = 1;
4575 else
4576 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4579 if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
4581 for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
4582 c = OMP_CLAUSE_CHAIN (c))
4584 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4585 continue;
4587 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4589 /* UDR reductions are not supported yet for SIMT; disable
4590 SIMT. */
4591 sctx->max_vf = 1;
4592 break;
4595 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
4596 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
4598 /* Doing boolean operations on non-integral types is
4599 for conformance only; it's not worth supporting this
4600 for SIMT. */
4601 sctx->max_vf = 1;
4602 break;
4606 if (maybe_gt (sctx->max_vf, 1U))
4608 sctx->idx = create_tmp_var (unsigned_type_node);
4609 sctx->lane = create_tmp_var (unsigned_type_node);
4612 if (known_eq (sctx->max_vf, 1U))
4613 return false;
4615 if (sctx->is_simt)
4617 if (is_gimple_reg (new_var))
4619 ivar = lvar = new_var;
4620 return true;
4622 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4623 ivar = lvar = create_tmp_var (type);
4624 TREE_ADDRESSABLE (ivar) = 1;
4625 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4626 NULL, DECL_ATTRIBUTES (ivar));
4627 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4628 tree clobber = build_clobber (type);
4629 gimple *g = gimple_build_assign (ivar, clobber);
4630 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4632 else
4634 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4635 tree avar = create_tmp_var_raw (atype);
4636 if (TREE_ADDRESSABLE (new_var))
4637 TREE_ADDRESSABLE (avar) = 1;
4638 DECL_ATTRIBUTES (avar)
4639 = tree_cons (get_identifier ("omp simd array"), NULL,
4640 DECL_ATTRIBUTES (avar));
4641 gimple_add_tmp_var (avar);
4642 tree iavar = avar;
4643 if (rvar && !ctx->for_simd_scan_phase)
4645 /* For inscan reductions, create another array temporary,
4646 which will hold the reduced value. */
4647 iavar = create_tmp_var_raw (atype);
4648 if (TREE_ADDRESSABLE (new_var))
4649 TREE_ADDRESSABLE (iavar) = 1;
4650 DECL_ATTRIBUTES (iavar)
4651 = tree_cons (get_identifier ("omp simd array"), NULL,
4652 tree_cons (get_identifier ("omp simd inscan"), NULL,
4653 DECL_ATTRIBUTES (iavar)));
4654 gimple_add_tmp_var (iavar);
4655 ctx->cb.decl_map->put (avar, iavar);
4656 if (sctx->lastlane == NULL_TREE)
4657 sctx->lastlane = create_tmp_var (unsigned_type_node);
4658 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4659 sctx->lastlane, NULL_TREE, NULL_TREE);
4660 TREE_THIS_NOTRAP (*rvar) = 1;
4662 if (ctx->scan_exclusive)
4664 /* And for exclusive scan yet another one, which will
4665 hold the value during the scan phase. */
4666 tree savar = create_tmp_var_raw (atype);
4667 if (TREE_ADDRESSABLE (new_var))
4668 TREE_ADDRESSABLE (savar) = 1;
4669 DECL_ATTRIBUTES (savar)
4670 = tree_cons (get_identifier ("omp simd array"), NULL,
4671 tree_cons (get_identifier ("omp simd inscan "
4672 "exclusive"), NULL,
4673 DECL_ATTRIBUTES (savar)));
4674 gimple_add_tmp_var (savar);
4675 ctx->cb.decl_map->put (iavar, savar);
4676 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4677 sctx->idx, NULL_TREE, NULL_TREE);
4678 TREE_THIS_NOTRAP (*rvar2) = 1;
4681 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4682 NULL_TREE, NULL_TREE);
4683 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4684 NULL_TREE, NULL_TREE);
4685 TREE_THIS_NOTRAP (ivar) = 1;
4686 TREE_THIS_NOTRAP (lvar) = 1;
4688 if (DECL_P (new_var))
4690 SET_DECL_VALUE_EXPR (new_var, lvar);
4691 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4693 return true;
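/* Sketch of the transformation above (not part of the original file): for a
   non-SIMT simd loop, a privatized scalar X of type T becomes an
   "omp simd array"

       T D.simdarr[max_vf];

   and uses of X in the loop body are rewritten to D.simdarr[lane] via
   DECL_VALUE_EXPR, giving each SIMD lane its own copy; for inscan
   reductions a second array (and for exclusive scan a third) is created as
   described in the comments above.  */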
4696 /* Helper function of lower_rec_input_clauses. For a reference
4697 in a simd reduction, add the underlying variable it will reference. */
4699 static void
4700 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4702 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4703 if (TREE_CONSTANT (z))
4705 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4706 get_name (new_vard));
4707 gimple_add_tmp_var (z);
4708 TREE_ADDRESSABLE (z) = 1;
4709 z = build_fold_addr_expr_loc (loc, z);
4710 gimplify_assign (new_vard, z, ilist);
4714 /* Helper function for lower_rec_input_clauses. Emit code into the ILIST
4715 sequence that computes (type) (tskred_temp[idx]). */
4717 static tree
4718 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4719 unsigned idx)
4721 unsigned HOST_WIDE_INT sz
4722 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4723 tree r = build2 (MEM_REF, pointer_sized_int_node,
4724 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4725 idx * sz));
4726 tree v = create_tmp_var (pointer_sized_int_node);
4727 gimple *g = gimple_build_assign (v, r);
4728 gimple_seq_add_stmt (ilist, g);
4729 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4731 v = create_tmp_var (type);
4732 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4733 gimple_seq_add_stmt (ilist, g);
4735 return v;
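/* Roughly (not part of the original file), the helper above emits

       v = MEM <__UINTPTR_TYPE__> [tskred_temp + IDX * sizeof (__UINTPTR_TYPE__)];
       v2 = (TYPE) v;	/* only when the conversion is not useless */

   and returns the last temporary created.  */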
4738 /* Lower early initialization of privatized variable NEW_VAR
4739 if it needs an allocator (i.e. it has an allocate clause). */
4741 static bool
4742 lower_private_allocate (tree var, tree new_var, tree &allocator,
4743 tree &allocate_ptr, gimple_seq *ilist,
4744 omp_context *ctx, bool is_ref, tree size)
4746 if (allocator)
4747 return false;
4748 gcc_assert (allocate_ptr == NULL_TREE);
4749 if (ctx->allocate_map
4750 && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
4751 if (tree *allocatorp = ctx->allocate_map->get (var))
4752 allocator = *allocatorp;
4753 if (allocator == NULL_TREE)
4754 return false;
4755 if (!is_ref && omp_privatize_by_reference (var))
4757 allocator = NULL_TREE;
4758 return false;
4761 unsigned HOST_WIDE_INT ialign = 0;
4762 if (TREE_CODE (allocator) == TREE_LIST)
4764 ialign = tree_to_uhwi (TREE_VALUE (allocator));
4765 allocator = TREE_PURPOSE (allocator);
4767 if (TREE_CODE (allocator) != INTEGER_CST)
4768 allocator = build_outer_var_ref (allocator, ctx);
4769 allocator = fold_convert (pointer_sized_int_node, allocator);
4770 if (TREE_CODE (allocator) != INTEGER_CST)
4772 tree var = create_tmp_var (TREE_TYPE (allocator));
4773 gimplify_assign (var, allocator, ilist);
4774 allocator = var;
4777 tree ptr_type, align, sz = size;
4778 if (TYPE_P (new_var))
4780 ptr_type = build_pointer_type (new_var);
4781 ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
4783 else if (is_ref)
4785 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4786 ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4788 else
4790 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4791 ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
4792 if (sz == NULL_TREE)
4793 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
4795 align = build_int_cst (size_type_node, ialign);
4796 if (TREE_CODE (sz) != INTEGER_CST)
4798 tree szvar = create_tmp_var (size_type_node);
4799 gimplify_assign (szvar, sz, ilist);
4800 sz = szvar;
4802 allocate_ptr = create_tmp_var (ptr_type);
4803 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4804 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4805 gimple_call_set_lhs (g, allocate_ptr);
4806 gimple_seq_add_stmt (ilist, g);
4807 if (!is_ref)
4809 tree x = build_simple_mem_ref (allocate_ptr);
4810 TREE_THIS_NOTRAP (x) = 1;
4811 SET_DECL_VALUE_EXPR (new_var, x);
4812 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4814 return true;
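/* Illustrative sketch (not part of the original file): for

       #pragma omp parallel private (x) allocate (al : x)

   the code above emits, roughly,

       ptr = GOMP_alloc (alignof (x), sizeof (x), al);

   and rewrites x via DECL_VALUE_EXPR to *ptr; the matching GOMP_free call
   is emitted by the callers once the private copy dies.  */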
4817 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4818 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4819 private variables. Initialization statements go in ILIST, while calls
4820 to destructors go in DLIST. */
4822 static void
4823 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4824 omp_context *ctx, struct omp_for_data *fd)
4826 tree c, copyin_seq, x, ptr;
4827 bool copyin_by_ref = false;
4828 bool lastprivate_firstprivate = false;
4829 bool reduction_omp_orig_ref = false;
4830 int pass;
4831 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4832 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4833 omplow_simd_context sctx = omplow_simd_context ();
4834 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4835 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4836 gimple_seq llist[4] = { };
4837 tree nonconst_simd_if = NULL_TREE;
4839 copyin_seq = NULL;
4840 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4842 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4843 with data sharing clauses referencing variable sized vars. That
4844 is unnecessarily hard to support and very unlikely to result in
4845 vectorized code anyway. */
4846 if (is_simd)
4847 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4848 switch (OMP_CLAUSE_CODE (c))
4850 case OMP_CLAUSE_LINEAR:
4851 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4852 sctx.max_vf = 1;
4853 /* FALLTHRU */
4854 case OMP_CLAUSE_PRIVATE:
4855 case OMP_CLAUSE_FIRSTPRIVATE:
4856 case OMP_CLAUSE_LASTPRIVATE:
4857 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4858 sctx.max_vf = 1;
4859 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4861 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4862 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4863 sctx.max_vf = 1;
4865 break;
4866 case OMP_CLAUSE_REDUCTION:
4867 case OMP_CLAUSE_IN_REDUCTION:
4868 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4869 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4870 sctx.max_vf = 1;
4871 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4873 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4874 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4875 sctx.max_vf = 1;
4877 break;
4878 case OMP_CLAUSE_IF:
4879 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4880 sctx.max_vf = 1;
4881 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4882 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4883 break;
4884 case OMP_CLAUSE_SIMDLEN:
4885 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4886 sctx.max_vf = 1;
4887 break;
4888 case OMP_CLAUSE__CONDTEMP_:
4889 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4890 if (sctx.is_simt)
4891 sctx.max_vf = 1;
4892 break;
4893 default:
4894 continue;
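	/* For example (not part of the original file), any of

	     #pragma omp simd if (0) ...
	     #pragma omp simd simdlen (1) ...
	     #pragma omp simd private (vla) ...	/* variable sized decl */

	   forces max_vf to 1 above, which in turn enforces safelen (1).  */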
4897 /* Add a placeholder for simduid. */
4898 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4899 sctx.simt_eargs.safe_push (NULL_TREE);
4901 unsigned task_reduction_cnt = 0;
4902 unsigned task_reduction_cntorig = 0;
4903 unsigned task_reduction_cnt_full = 0;
4904 unsigned task_reduction_cntorig_full = 0;
4905 unsigned task_reduction_other_cnt = 0;
4906 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4907 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4908 /* Do all the fixed sized types in the first pass, and the variable sized
4909 types in the second pass. This makes sure that the scalar arguments to
4910 the variable sized types are processed before we use them in the
4911 variable sized operations. For task reductions we use 4 passes: in the
4912 first two we ignore them, in the third we gather arguments for the
4913 GOMP_task_reduction_remap call, and in the last pass we actually handle
4914 the task reductions. */
4915 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4916 ? 4 : 2); ++pass)
4918 if (pass == 2 && task_reduction_cnt)
4920 tskred_atype
4921 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4922 + task_reduction_cntorig);
4923 tskred_avar = create_tmp_var_raw (tskred_atype);
4924 gimple_add_tmp_var (tskred_avar);
4925 TREE_ADDRESSABLE (tskred_avar) = 1;
4926 task_reduction_cnt_full = task_reduction_cnt;
4927 task_reduction_cntorig_full = task_reduction_cntorig;
4929 else if (pass == 3 && task_reduction_cnt)
4931 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4932 gimple *g
4933 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4934 size_int (task_reduction_cntorig),
4935 build_fold_addr_expr (tskred_avar));
4936 gimple_seq_add_stmt (ilist, g);
4938 if (pass == 3 && task_reduction_other_cnt)
4940 /* For reduction clauses, build
4941 tskred_base = (void *) tskred_temp[2]
4942 + omp_get_thread_num () * tskred_temp[1]
4943 or if tskred_temp[1] is known to be constant, that constant
4944 directly. This is the start of the private reduction copy block
4945 for the current thread. */
4946 tree v = create_tmp_var (integer_type_node);
4947 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4948 gimple *g = gimple_build_call (x, 0);
4949 gimple_call_set_lhs (g, v);
4950 gimple_seq_add_stmt (ilist, g);
4951 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4952 tskred_temp = OMP_CLAUSE_DECL (c);
4953 if (is_taskreg_ctx (ctx))
4954 tskred_temp = lookup_decl (tskred_temp, ctx);
4955 tree v2 = create_tmp_var (sizetype);
4956 g = gimple_build_assign (v2, NOP_EXPR, v);
4957 gimple_seq_add_stmt (ilist, g);
4958 if (ctx->task_reductions[0])
4959 v = fold_convert (sizetype, ctx->task_reductions[0]);
4960 else
4961 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4962 tree v3 = create_tmp_var (sizetype);
4963 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4964 gimple_seq_add_stmt (ilist, g);
4965 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4966 tskred_base = create_tmp_var (ptr_type_node);
4967 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4968 gimple_seq_add_stmt (ilist, g);
4970 task_reduction_cnt = 0;
4971 task_reduction_cntorig = 0;
4972 task_reduction_other_cnt = 0;
4973 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4975 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4976 tree var, new_var;
4977 bool by_ref;
4978 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4979 bool task_reduction_p = false;
4980 bool task_reduction_needs_orig_p = false;
4981 tree cond = NULL_TREE;
4982 tree allocator, allocate_ptr;
4984 switch (c_kind)
4986 case OMP_CLAUSE_PRIVATE:
4987 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4988 continue;
4989 break;
4990 case OMP_CLAUSE_SHARED:
4991 /* Ignore shared directives in teams construct inside
4992 of target construct. */
4993 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4994 && !is_host_teams_ctx (ctx))
4995 continue;
4996 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4998 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4999 || is_global_var (OMP_CLAUSE_DECL (c)));
5000 continue;
5002 case OMP_CLAUSE_FIRSTPRIVATE:
5003 case OMP_CLAUSE_COPYIN:
5004 break;
5005 case OMP_CLAUSE_LINEAR:
5006 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
5007 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5008 lastprivate_firstprivate = true;
5009 break;
5010 case OMP_CLAUSE_REDUCTION:
5011 case OMP_CLAUSE_IN_REDUCTION:
5012 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5013 || is_task_ctx (ctx)
5014 || OMP_CLAUSE_REDUCTION_TASK (c))
5016 task_reduction_p = true;
5017 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5019 task_reduction_other_cnt++;
5020 if (pass == 2)
5021 continue;
5023 else
5024 task_reduction_cnt++;
5025 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5027 var = OMP_CLAUSE_DECL (c);
5028 /* If var is a global variable that isn't privatized
5029 in outer contexts, we don't need to look up the
5030 original address; it is always the address of the
5031 global variable itself. */
5032 if (!DECL_P (var)
5033 || omp_privatize_by_reference (var)
5034 || !is_global_var
5035 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5037 task_reduction_needs_orig_p = true;
5038 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5039 task_reduction_cntorig++;
5043 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5044 reduction_omp_orig_ref = true;
5045 break;
5046 case OMP_CLAUSE__REDUCTEMP_:
5047 if (!is_taskreg_ctx (ctx))
5048 continue;
5049 /* FALLTHRU */
5050 case OMP_CLAUSE__LOOPTEMP_:
5051 /* Handle _looptemp_/_reductemp_ clauses only on
5052 parallel/task. */
5053 if (fd)
5054 continue;
5055 break;
5056 case OMP_CLAUSE_LASTPRIVATE:
5057 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5059 lastprivate_firstprivate = true;
5060 if (pass != 0 || is_taskloop_ctx (ctx))
5061 continue;
5063 /* Even without corresponding firstprivate, if
5064 decl is Fortran allocatable, it needs an outer var
5065 reference. */
5066 else if (pass == 0
5067 && lang_hooks.decls.omp_private_outer_ref
5068 (OMP_CLAUSE_DECL (c)))
5069 lastprivate_firstprivate = true;
5070 break;
5071 case OMP_CLAUSE_ALIGNED:
5072 if (pass != 1)
5073 continue;
5074 var = OMP_CLAUSE_DECL (c);
5075 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5076 && !is_global_var (var))
5078 new_var = maybe_lookup_decl (var, ctx);
5079 if (new_var == NULL_TREE)
5080 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
5081 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5082 tree alarg = omp_clause_aligned_alignment (c);
5083 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5084 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5085 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5086 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5087 gimplify_and_add (x, ilist);
5089 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5090 && is_global_var (var))
5092 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5093 new_var = lookup_decl (var, ctx);
5094 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
5095 t = build_fold_addr_expr_loc (clause_loc, t);
5096 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5097 tree alarg = omp_clause_aligned_alignment (c);
5098 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5099 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5100 t = fold_convert_loc (clause_loc, ptype, t);
5101 x = create_tmp_var (ptype);
5102 t = build2 (MODIFY_EXPR, ptype, x, t);
5103 gimplify_and_add (t, ilist);
5104 t = build_simple_mem_ref_loc (clause_loc, x);
5105 SET_DECL_VALUE_EXPR (new_var, t);
5106 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5108 continue;
5109 case OMP_CLAUSE__CONDTEMP_:
5110 if (is_parallel_ctx (ctx)
5111 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5112 break;
5113 continue;
5114 default:
5115 continue;
5118 if (task_reduction_p != (pass >= 2))
5119 continue;
5121 allocator = NULL_TREE;
5122 allocate_ptr = NULL_TREE;
5123 new_var = var = OMP_CLAUSE_DECL (c);
5124 if ((c_kind == OMP_CLAUSE_REDUCTION
5125 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5126 && TREE_CODE (var) == MEM_REF)
5128 var = TREE_OPERAND (var, 0);
5129 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5130 var = TREE_OPERAND (var, 0);
5131 if (TREE_CODE (var) == INDIRECT_REF
5132 || TREE_CODE (var) == ADDR_EXPR)
5133 var = TREE_OPERAND (var, 0);
5134 if (is_variable_sized (var))
5136 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5137 var = DECL_VALUE_EXPR (var);
5138 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5139 var = TREE_OPERAND (var, 0);
5140 gcc_assert (DECL_P (var));
5142 new_var = var;
5144 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5146 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5147 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5149 else if (c_kind != OMP_CLAUSE_COPYIN)
5150 new_var = lookup_decl (var, ctx);
5152 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5154 if (pass != 0)
5155 continue;
5157 /* C/C++ array section reductions. */
5158 else if ((c_kind == OMP_CLAUSE_REDUCTION
5159 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5160 && var != OMP_CLAUSE_DECL (c))
5162 if (pass == 0)
5163 continue;
5165 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5166 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5168 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5170 tree b = TREE_OPERAND (orig_var, 1);
5171 if (is_omp_target (ctx->stmt))
5172 b = NULL_TREE;
5173 else
5174 b = maybe_lookup_decl (b, ctx);
5175 if (b == NULL)
5177 b = TREE_OPERAND (orig_var, 1);
5178 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5180 if (integer_zerop (bias))
5181 bias = b;
5182 else
5184 bias = fold_convert_loc (clause_loc,
5185 TREE_TYPE (b), bias);
5186 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5187 TREE_TYPE (b), b, bias);
5189 orig_var = TREE_OPERAND (orig_var, 0);
5191 if (pass == 2)
5193 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5194 if (is_global_var (out)
5195 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5196 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5197 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5198 != POINTER_TYPE)))
5199 x = var;
5200 else if (is_omp_target (ctx->stmt))
5201 x = out;
5202 else
5204 bool by_ref = use_pointer_for_field (var, NULL);
5205 x = build_receiver_ref (var, by_ref, ctx);
5206 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5207 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5208 == POINTER_TYPE))
5209 x = build_fold_addr_expr (x);
5211 if (TREE_CODE (orig_var) == INDIRECT_REF)
5212 x = build_simple_mem_ref (x);
5213 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5215 if (var == TREE_OPERAND (orig_var, 0))
5216 x = build_fold_addr_expr (x);
5218 bias = fold_convert (sizetype, bias);
5219 x = fold_convert (ptr_type_node, x);
5220 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5221 TREE_TYPE (x), x, bias);
5222 unsigned cnt = task_reduction_cnt - 1;
5223 if (!task_reduction_needs_orig_p)
5224 cnt += (task_reduction_cntorig_full
5225 - task_reduction_cntorig);
5226 else
5227 cnt = task_reduction_cntorig - 1;
5228 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5229 size_int (cnt), NULL_TREE, NULL_TREE);
5230 gimplify_assign (r, x, ilist);
5231 continue;
5234 if (TREE_CODE (orig_var) == INDIRECT_REF
5235 || TREE_CODE (orig_var) == ADDR_EXPR)
5236 orig_var = TREE_OPERAND (orig_var, 0);
5237 tree d = OMP_CLAUSE_DECL (c);
5238 tree type = TREE_TYPE (d);
5239 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5240 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5241 tree sz = v;
5242 const char *name = get_name (orig_var);
5243 if (pass != 3 && !TREE_CONSTANT (v))
5245 tree t;
5246 if (is_omp_target (ctx->stmt))
5247 t = NULL_TREE;
5248 else
5249 t = maybe_lookup_decl (v, ctx);
5250 if (t)
5251 v = t;
5252 else
5253 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5254 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5255 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5256 TREE_TYPE (v), v,
5257 build_int_cst (TREE_TYPE (v), 1));
5258 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5259 TREE_TYPE (v), t,
5260 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5262 if (pass == 3)
5264 tree xv = create_tmp_var (ptr_type_node);
5265 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5267 unsigned cnt = task_reduction_cnt - 1;
5268 if (!task_reduction_needs_orig_p)
5269 cnt += (task_reduction_cntorig_full
5270 - task_reduction_cntorig);
5271 else
5272 cnt = task_reduction_cntorig - 1;
5273 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5274 size_int (cnt), NULL_TREE, NULL_TREE);
5276 gimple *g = gimple_build_assign (xv, x);
5277 gimple_seq_add_stmt (ilist, g);
5279 else
5281 unsigned int idx = *ctx->task_reduction_map->get (c);
5282 tree off;
5283 if (ctx->task_reductions[1 + idx])
5284 off = fold_convert (sizetype,
5285 ctx->task_reductions[1 + idx]);
5286 else
5287 off = task_reduction_read (ilist, tskred_temp, sizetype,
5288 7 + 3 * idx + 1);
5289 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5290 tskred_base, off);
5291 gimple_seq_add_stmt (ilist, g);
5293 x = fold_convert (build_pointer_type (boolean_type_node),
5294 xv);
5295 if (TREE_CONSTANT (v))
5296 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5297 TYPE_SIZE_UNIT (type));
5298 else
5300 tree t;
5301 if (is_omp_target (ctx->stmt))
5302 t = NULL_TREE;
5303 else
5304 t = maybe_lookup_decl (v, ctx);
5305 if (t)
5306 v = t;
5307 else
5308 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5309 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5310 fb_rvalue);
5311 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5312 TREE_TYPE (v), v,
5313 build_int_cst (TREE_TYPE (v), 1));
5314 t = fold_build2_loc (clause_loc, MULT_EXPR,
5315 TREE_TYPE (v), t,
5316 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5317 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5319 cond = create_tmp_var (TREE_TYPE (x));
5320 gimplify_assign (cond, x, ilist);
5321 x = xv;
5323 else if (lower_private_allocate (var, type, allocator,
5324 allocate_ptr, ilist, ctx,
5325 true,
5326 TREE_CONSTANT (v)
5327 ? TYPE_SIZE_UNIT (type)
5328 : sz))
5329 x = allocate_ptr;
5330 else if (TREE_CONSTANT (v))
5332 x = create_tmp_var_raw (type, name);
5333 gimple_add_tmp_var (x);
5334 TREE_ADDRESSABLE (x) = 1;
5335 x = build_fold_addr_expr_loc (clause_loc, x);
5337 else
5339 tree atmp
5340 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5341 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5342 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5345 tree ptype = build_pointer_type (TREE_TYPE (type));
5346 x = fold_convert_loc (clause_loc, ptype, x);
5347 tree y = create_tmp_var (ptype, name);
5348 gimplify_assign (y, x, ilist);
5349 x = y;
5350 tree yb = y;
5352 if (!integer_zerop (bias))
5354 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5355 bias);
5356 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5357 x);
5358 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5359 pointer_sized_int_node, yb, bias);
5360 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5361 yb = create_tmp_var (ptype, name);
5362 gimplify_assign (yb, x, ilist);
5363 x = yb;
5366 d = TREE_OPERAND (d, 0);
5367 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5368 d = TREE_OPERAND (d, 0);
5369 if (TREE_CODE (d) == ADDR_EXPR)
5371 if (orig_var != var)
5373 gcc_assert (is_variable_sized (orig_var));
5374 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5375 x);
5376 gimplify_assign (new_var, x, ilist);
5377 tree new_orig_var = lookup_decl (orig_var, ctx);
5378 tree t = build_fold_indirect_ref (new_var);
5379 DECL_IGNORED_P (new_var) = 0;
5380 TREE_THIS_NOTRAP (t) = 1;
5381 SET_DECL_VALUE_EXPR (new_orig_var, t);
5382 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5384 else
5386 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5387 build_int_cst (ptype, 0));
5388 SET_DECL_VALUE_EXPR (new_var, x);
5389 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5392 else
5394 gcc_assert (orig_var == var);
5395 if (TREE_CODE (d) == INDIRECT_REF)
5397 x = create_tmp_var (ptype, name);
5398 TREE_ADDRESSABLE (x) = 1;
5399 gimplify_assign (x, yb, ilist);
5400 x = build_fold_addr_expr_loc (clause_loc, x);
5402 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5403 gimplify_assign (new_var, x, ilist);
5405 /* GOMP_taskgroup_reduction_register memsets the whole
5406 array to zero. If the initializer is zero, we don't
5407 need to initialize it again, just mark it as ever
5408 used unconditionally, i.e. cond = true. */
5409 if (cond
5410 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5411 && initializer_zerop (omp_reduction_init (c,
5412 TREE_TYPE (type))))
5414 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5415 boolean_true_node);
5416 gimple_seq_add_stmt (ilist, g);
5417 continue;
5419 tree end = create_artificial_label (UNKNOWN_LOCATION);
5420 if (cond)
5422 gimple *g;
5423 if (!is_parallel_ctx (ctx))
5425 tree condv = create_tmp_var (boolean_type_node);
5426 g = gimple_build_assign (condv,
5427 build_simple_mem_ref (cond));
5428 gimple_seq_add_stmt (ilist, g);
5429 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5430 g = gimple_build_cond (NE_EXPR, condv,
5431 boolean_false_node, end, lab1);
5432 gimple_seq_add_stmt (ilist, g);
5433 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5435 g = gimple_build_assign (build_simple_mem_ref (cond),
5436 boolean_true_node);
5437 gimple_seq_add_stmt (ilist, g);
5440 tree y1 = create_tmp_var (ptype);
5441 gimplify_assign (y1, y, ilist);
5442 tree i2 = NULL_TREE, y2 = NULL_TREE;
5443 tree body2 = NULL_TREE, end2 = NULL_TREE;
5444 tree y3 = NULL_TREE, y4 = NULL_TREE;
5445 if (task_reduction_needs_orig_p)
5447 y3 = create_tmp_var (ptype);
5448 tree ref;
5449 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5450 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5451 size_int (task_reduction_cnt_full
5452 + task_reduction_cntorig - 1),
5453 NULL_TREE, NULL_TREE);
5454 else
5456 unsigned int idx = *ctx->task_reduction_map->get (c);
5457 ref = task_reduction_read (ilist, tskred_temp, ptype,
5458 7 + 3 * idx);
5460 gimplify_assign (y3, ref, ilist);
5462 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5464 if (pass != 3)
5466 y2 = create_tmp_var (ptype);
5467 gimplify_assign (y2, y, ilist);
5469 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5471 tree ref = build_outer_var_ref (var, ctx);
5472 /* For references, build_outer_var_ref already performs this. */
5473 if (TREE_CODE (d) == INDIRECT_REF)
5474 gcc_assert (omp_privatize_by_reference (var));
5475 else if (TREE_CODE (d) == ADDR_EXPR)
5476 ref = build_fold_addr_expr (ref);
5477 else if (omp_privatize_by_reference (var))
5478 ref = build_fold_addr_expr (ref);
5479 ref = fold_convert_loc (clause_loc, ptype, ref);
5480 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5481 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5483 y3 = create_tmp_var (ptype);
5484 gimplify_assign (y3, unshare_expr (ref), ilist);
5486 if (is_simd)
5488 y4 = create_tmp_var (ptype);
5489 gimplify_assign (y4, ref, dlist);
5493 tree i = create_tmp_var (TREE_TYPE (v));
5494 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5495 tree body = create_artificial_label (UNKNOWN_LOCATION);
5496 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5497 if (y2)
5499 i2 = create_tmp_var (TREE_TYPE (v));
5500 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5501 body2 = create_artificial_label (UNKNOWN_LOCATION);
5502 end2 = create_artificial_label (UNKNOWN_LOCATION);
5503 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5505 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5507 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5508 tree decl_placeholder
5509 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5510 SET_DECL_VALUE_EXPR (decl_placeholder,
5511 build_simple_mem_ref (y1));
5512 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5513 SET_DECL_VALUE_EXPR (placeholder,
5514 y3 ? build_simple_mem_ref (y3)
5515 : error_mark_node);
5516 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5517 x = lang_hooks.decls.omp_clause_default_ctor
5518 (c, build_simple_mem_ref (y1),
5519 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5520 if (x)
5521 gimplify_and_add (x, ilist);
5522 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5524 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5525 lower_omp (&tseq, ctx);
5526 gimple_seq_add_seq (ilist, tseq);
5528 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5529 if (is_simd)
5531 SET_DECL_VALUE_EXPR (decl_placeholder,
5532 build_simple_mem_ref (y2));
5533 SET_DECL_VALUE_EXPR (placeholder,
5534 build_simple_mem_ref (y4));
5535 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5536 lower_omp (&tseq, ctx);
5537 gimple_seq_add_seq (dlist, tseq);
5538 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5540 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5541 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5542 if (y2)
5544 x = lang_hooks.decls.omp_clause_dtor
5545 (c, build_simple_mem_ref (y2));
5546 if (x)
5547 gimplify_and_add (x, dlist);
5550 else
5552 x = omp_reduction_init (c, TREE_TYPE (type));
5553 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5555 /* reduction(-:var) sums up the partial results, so it
5556 acts identically to reduction(+:var). */
5557 if (code == MINUS_EXPR)
5558 code = PLUS_EXPR;
5560 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5561 if (is_simd)
5563 x = build2 (code, TREE_TYPE (type),
5564 build_simple_mem_ref (y4),
5565 build_simple_mem_ref (y2));
5566 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5569 gimple *g
5570 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5571 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5572 gimple_seq_add_stmt (ilist, g);
5573 if (y3)
5575 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5576 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5577 gimple_seq_add_stmt (ilist, g);
5579 g = gimple_build_assign (i, PLUS_EXPR, i,
5580 build_int_cst (TREE_TYPE (i), 1));
5581 gimple_seq_add_stmt (ilist, g);
5582 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5583 gimple_seq_add_stmt (ilist, g);
5584 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5585 if (y2)
5587 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5588 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5589 gimple_seq_add_stmt (dlist, g);
5590 if (y4)
5592 g = gimple_build_assign
5593 (y4, POINTER_PLUS_EXPR, y4,
5594 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5595 gimple_seq_add_stmt (dlist, g);
5597 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5598 build_int_cst (TREE_TYPE (i2), 1));
5599 gimple_seq_add_stmt (dlist, g);
5600 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5601 gimple_seq_add_stmt (dlist, g);
5602 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5604 if (allocator)
5606 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5607 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5608 gimple_seq_add_stmt (dlist, g);
5610 continue;
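/* Pass 2: store the address of the original variable into its slot
   of the task-reduction address array TSKRED_AVAR, so that the
   runtime can find the original list item.  */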
5612 else if (pass == 2)
5614 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5615 if (is_global_var (out))
5616 x = var;
5617 else if (is_omp_target (ctx->stmt))
5618 x = out;
5619 else
5621 bool by_ref = use_pointer_for_field (var, ctx);
5622 x = build_receiver_ref (var, by_ref, ctx);
5624 if (!omp_privatize_by_reference (var))
5625 x = build_fold_addr_expr (x);
5626 x = fold_convert (ptr_type_node, x);
5627 unsigned cnt = task_reduction_cnt - 1;
5628 if (!task_reduction_needs_orig_p)
5629 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5630 else
5631 cnt = task_reduction_cntorig - 1;
5632 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5633 size_int (cnt), NULL_TREE, NULL_TREE);
5634 gimplify_assign (r, x, ilist);
5635 continue;
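/* Pass 3: redirect NEW_VAR to this thread's private copy inside the
   task-reduction data, located either via TSKRED_AVAR or at
   TSKRED_BASE plus a per-clause offset; COND is set to the address
   of the bool flag stored right after the private copy.  */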
5637 else if (pass == 3)
5639 tree type = TREE_TYPE (new_var);
5640 if (!omp_privatize_by_reference (var))
5641 type = build_pointer_type (type);
5642 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5644 unsigned cnt = task_reduction_cnt - 1;
5645 if (!task_reduction_needs_orig_p)
5646 cnt += (task_reduction_cntorig_full
5647 - task_reduction_cntorig);
5648 else
5649 cnt = task_reduction_cntorig - 1;
5650 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5651 size_int (cnt), NULL_TREE, NULL_TREE);
5653 else
5655 unsigned int idx = *ctx->task_reduction_map->get (c);
5656 tree off;
5657 if (ctx->task_reductions[1 + idx])
5658 off = fold_convert (sizetype,
5659 ctx->task_reductions[1 + idx]);
5660 else
5661 off = task_reduction_read (ilist, tskred_temp, sizetype,
5662 7 + 3 * idx + 1);
5663 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5664 tskred_base, off);
5666 x = fold_convert (type, x);
5667 tree t;
5668 if (omp_privatize_by_reference (var))
5670 gimplify_assign (new_var, x, ilist);
5671 t = new_var;
5672 new_var = build_simple_mem_ref (new_var);
5674 else
5676 t = create_tmp_var (type);
5677 gimplify_assign (t, x, ilist);
5678 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5679 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5681 t = fold_convert (build_pointer_type (boolean_type_node), t);
5682 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5683 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5684 cond = create_tmp_var (TREE_TYPE (t));
5685 gimplify_assign (cond, t, ilist);
5687 else if (is_variable_sized (var))
5689 /* For variable sized types, we need to allocate the
5690 actual storage here. Call alloca and store the
5691 result in the pointer decl that we created elsewhere. */
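/* That is, roughly: ptr = (TYPE *) __builtin_alloca_with_align (size,
   align), unless an OpenMP allocator was requested for VAR, in which
   case lower_private_allocate provides the storage instead.  */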
5692 if (pass == 0)
5693 continue;
5695 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5697 tree tmp;
5699 ptr = DECL_VALUE_EXPR (new_var);
5700 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5701 ptr = TREE_OPERAND (ptr, 0);
5702 gcc_assert (DECL_P (ptr));
5703 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5705 if (lower_private_allocate (var, new_var, allocator,
5706 allocate_ptr, ilist, ctx,
5707 false, x))
5708 tmp = allocate_ptr;
5709 else
5711 /* void *tmp = __builtin_alloca_with_align (x, DECL_ALIGN (var)); */
5712 tree atmp
5713 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5714 gcall *stmt
5715 = gimple_build_call (atmp, 2, x,
5716 size_int (DECL_ALIGN (var)));
5717 cfun->calls_alloca = 1;
5718 tmp = create_tmp_var_raw (ptr_type_node);
5719 gimple_add_tmp_var (tmp);
5720 gimple_call_set_lhs (stmt, tmp);
5722 gimple_seq_add_stmt (ilist, stmt);
5725 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5726 gimplify_assign (ptr, x, ilist);
5729 else if (omp_privatize_by_reference (var)
5730 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5731 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5733 /* For references that are being privatized for Fortran,
5734 allocate new backing storage for the new pointer
5735 variable. This allows us to avoid rewriting all the
5736 code that expects a pointer into code that expects
5737 a direct variable. */
5738 if (pass == 0)
5739 continue;
5741 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5742 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5744 x = build_receiver_ref (var, false, ctx);
5745 if (ctx->allocate_map)
5746 if (tree *allocatep = ctx->allocate_map->get (var))
5748 allocator = *allocatep;
5749 if (TREE_CODE (allocator) == TREE_LIST)
5750 allocator = TREE_PURPOSE (allocator);
5751 if (TREE_CODE (allocator) != INTEGER_CST)
5752 allocator = build_outer_var_ref (allocator, ctx);
5753 allocator = fold_convert (pointer_sized_int_node,
5754 allocator);
5755 allocate_ptr = unshare_expr (x);
5757 if (allocator == NULL_TREE)
5758 x = build_fold_addr_expr_loc (clause_loc, x);
5760 else if (lower_private_allocate (var, new_var, allocator,
5761 allocate_ptr,
5762 ilist, ctx, true, x))
5763 x = allocate_ptr;
5764 else if (TREE_CONSTANT (x))
5766 /* For a reduction in a SIMD loop, defer adding the
5767 initialization of the reference, because if we decide
5768 to use a SIMD array for it, the initialization could cause
5769 an expansion ICE. Ditto for other privatization clauses. */
5770 if (is_simd)
5771 x = NULL_TREE;
5772 else
5774 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5775 get_name (var));
5776 gimple_add_tmp_var (x);
5777 TREE_ADDRESSABLE (x) = 1;
5778 x = build_fold_addr_expr_loc (clause_loc, x);
5781 else
5783 tree atmp
5784 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5785 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5786 tree al = size_int (TYPE_ALIGN (rtype));
5787 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5790 if (x)
5792 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5793 gimplify_assign (new_var, x, ilist);
5796 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5798 else if ((c_kind == OMP_CLAUSE_REDUCTION
5799 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5800 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5802 if (pass == 0)
5803 continue;
5805 else if (pass != 0)
5806 continue;
5808 switch (OMP_CLAUSE_CODE (c))
5810 case OMP_CLAUSE_SHARED:
5811 /* Ignore shared directives in teams construct inside
5812 target construct. */
5813 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5814 && !is_host_teams_ctx (ctx))
5815 continue;
5816 /* Shared global vars are just accessed directly. */
5817 if (is_global_var (new_var))
5818 break;
5819 /* For taskloop firstprivate/lastprivate, represented
5820 as firstprivate and shared clause on the task, new_var
5821 is the firstprivate var. */
5822 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5823 break;
5824 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5825 needs to be delayed until after fixup_child_record_type so
5826 that we get the correct type during the dereference. */
5827 by_ref = use_pointer_for_field (var, ctx);
5828 x = build_receiver_ref (var, by_ref, ctx);
5829 SET_DECL_VALUE_EXPR (new_var, x);
5830 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5832 /* ??? If VAR is not passed by reference, and the variable
5833 hasn't been initialized yet, then we'll get a warning for
5834 the store into the omp_data_s structure. Ideally, we'd be
5835 able to notice this and not store anything at all, but
5836 we're generating code too early. Suppress the warning. */
5837 if (!by_ref)
5838 suppress_warning (var, OPT_Wuninitialized);
5839 break;
5841 case OMP_CLAUSE__CONDTEMP_:
5842 if (is_parallel_ctx (ctx))
5844 x = build_receiver_ref (var, false, ctx);
5845 SET_DECL_VALUE_EXPR (new_var, x);
5846 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5848 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5850 x = build_zero_cst (TREE_TYPE (var));
5851 goto do_private;
5853 break;
5855 case OMP_CLAUSE_LASTPRIVATE:
5856 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5857 break;
5858 /* FALLTHRU */
5860 case OMP_CLAUSE_PRIVATE:
5861 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5862 x = build_outer_var_ref (var, ctx);
5863 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5865 if (is_task_ctx (ctx))
5866 x = build_receiver_ref (var, false, ctx);
5867 else
5868 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5870 else
5871 x = NULL;
5872 do_private:
5873 tree nx;
5874 bool copy_ctor;
5875 copy_ctor = false;
5876 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5877 ilist, ctx, false, NULL_TREE);
5878 nx = unshare_expr (new_var);
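/* The simd lastprivate loop iteration variable is initialized from
   the outer variable via the copy constructor; everything else is
   default constructed.  */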
5879 if (is_simd
5880 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5881 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5882 copy_ctor = true;
5883 if (copy_ctor)
5884 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5885 else
5886 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5887 if (is_simd)
5889 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5890 if ((TREE_ADDRESSABLE (new_var) || nx || y
5891 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5892 && (gimple_omp_for_collapse (ctx->stmt) != 1
5893 || (gimple_omp_for_index (ctx->stmt, 0)
5894 != new_var)))
5895 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5896 || omp_privatize_by_reference (var))
5897 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5898 ivar, lvar))
5900 if (omp_privatize_by_reference (var))
5902 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5903 tree new_vard = TREE_OPERAND (new_var, 0);
5904 gcc_assert (DECL_P (new_vard));
5905 SET_DECL_VALUE_EXPR (new_vard,
5906 build_fold_addr_expr (lvar));
5907 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5910 if (nx)
5912 tree iv = unshare_expr (ivar);
5913 if (copy_ctor)
5914 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5916 else
5917 x = lang_hooks.decls.omp_clause_default_ctor (c, iv, x);
5921 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5923 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5924 unshare_expr (ivar), x);
5925 nx = x;
5927 if (nx && x)
5928 gimplify_and_add (x, &llist[0]);
5929 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5930 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5932 tree v = new_var;
5933 if (!DECL_P (v))
5935 gcc_assert (TREE_CODE (v) == MEM_REF);
5936 v = TREE_OPERAND (v, 0);
5937 gcc_assert (DECL_P (v));
5939 v = *ctx->lastprivate_conditional_map->get (v);
5940 tree t = create_tmp_var (TREE_TYPE (v));
5941 tree z = build_zero_cst (TREE_TYPE (v));
5942 tree orig_v
5943 = build_outer_var_ref (var, ctx,
5944 OMP_CLAUSE_LASTPRIVATE);
5945 gimple_seq_add_stmt (dlist,
5946 gimple_build_assign (t, z));
5947 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5948 tree civar = DECL_VALUE_EXPR (v);
5949 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5950 civar = unshare_expr (civar);
5951 TREE_OPERAND (civar, 1) = sctx.idx;
5952 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5953 unshare_expr (civar));
5954 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5955 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5956 orig_v, unshare_expr (ivar)));
5957 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5958 civar);
5959 x = build3 (COND_EXPR, void_type_node, cond, x,
5960 void_node);
5961 gimple_seq tseq = NULL;
5962 gimplify_and_add (x, &tseq);
5963 if (ctx->outer)
5964 lower_omp (&tseq, ctx->outer);
5965 gimple_seq_add_seq (&llist[1], tseq);
5967 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5968 && ctx->for_simd_scan_phase)
5970 x = unshare_expr (ivar);
5971 tree orig_v
5972 = build_outer_var_ref (var, ctx,
5973 OMP_CLAUSE_LASTPRIVATE);
5974 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5975 orig_v);
5976 gimplify_and_add (x, &llist[0]);
5978 if (y)
5980 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5981 if (y)
5982 gimplify_and_add (y, &llist[1]);
5984 break;
5986 if (omp_privatize_by_reference (var))
5988 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5989 tree new_vard = TREE_OPERAND (new_var, 0);
5990 gcc_assert (DECL_P (new_vard));
5991 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5992 x = TYPE_SIZE_UNIT (type);
5993 if (TREE_CONSTANT (x))
5995 x = create_tmp_var_raw (type, get_name (var));
5996 gimple_add_tmp_var (x);
5997 TREE_ADDRESSABLE (x) = 1;
5998 x = build_fold_addr_expr_loc (clause_loc, x);
5999 x = fold_convert_loc (clause_loc,
6000 TREE_TYPE (new_vard), x);
6001 gimplify_assign (new_vard, x, ilist);
6005 if (nx)
6006 gimplify_and_add (nx, ilist);
6007 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6008 && is_simd
6009 && ctx->for_simd_scan_phase)
6011 tree orig_v = build_outer_var_ref (var, ctx,
6012 OMP_CLAUSE_LASTPRIVATE);
6013 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
6014 orig_v);
6015 gimplify_and_add (x, ilist);
6017 /* FALLTHRU */
6019 do_dtor:
6020 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
6021 if (x)
6022 gimplify_and_add (x, dlist);
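/* If the privatized copy came from an OpenMP allocator, pair the
   allocation with a BUILT_IN_GOMP_FREE call in the destructor
   sequence, forcing both arguments into gimple values first.  */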
6023 if (allocator)
6025 if (!is_gimple_val (allocator))
6027 tree avar = create_tmp_var (TREE_TYPE (allocator));
6028 gimplify_assign (avar, allocator, dlist);
6029 allocator = avar;
6031 if (!is_gimple_val (allocate_ptr))
6033 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
6034 gimplify_assign (apvar, allocate_ptr, dlist);
6035 allocate_ptr = apvar;
6037 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
6038 gimple *g
6039 = gimple_build_call (f, 2, allocate_ptr, allocator);
6040 gimple_seq_add_stmt (dlist, g);
6042 break;
6044 case OMP_CLAUSE_LINEAR:
6045 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6046 goto do_firstprivate;
6047 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6048 x = NULL;
6049 else
6050 x = build_outer_var_ref (var, ctx);
6051 goto do_private;
6053 case OMP_CLAUSE_FIRSTPRIVATE:
6054 if (is_task_ctx (ctx))
6056 if ((omp_privatize_by_reference (var)
6057 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
6058 || is_variable_sized (var))
6059 goto do_dtor;
6060 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
6061 ctx))
6062 || use_pointer_for_field (var, NULL))
6064 x = build_receiver_ref (var, false, ctx);
6065 if (ctx->allocate_map)
6066 if (tree *allocatep = ctx->allocate_map->get (var))
6068 allocator = *allocatep;
6069 if (TREE_CODE (allocator) == TREE_LIST)
6070 allocator = TREE_PURPOSE (allocator);
6071 if (TREE_CODE (allocator) != INTEGER_CST)
6072 allocator = build_outer_var_ref (allocator, ctx);
6073 allocator = fold_convert (pointer_sized_int_node,
6074 allocator);
6075 allocate_ptr = unshare_expr (x);
6076 x = build_simple_mem_ref (x);
6077 TREE_THIS_NOTRAP (x) = 1;
6079 SET_DECL_VALUE_EXPR (new_var, x);
6080 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6081 goto do_dtor;
6084 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6085 && omp_privatize_by_reference (var))
6087 x = build_outer_var_ref (var, ctx);
6088 gcc_assert (TREE_CODE (x) == MEM_REF
6089 && integer_zerop (TREE_OPERAND (x, 1)));
6090 x = TREE_OPERAND (x, 0);
6091 x = lang_hooks.decls.omp_clause_copy_ctor
6092 (c, unshare_expr (new_var), x);
6093 gimplify_and_add (x, ilist);
6094 goto do_dtor;
6096 do_firstprivate:
6097 lower_private_allocate (var, new_var, allocator, allocate_ptr,
6098 ilist, ctx, false, NULL_TREE);
6099 x = build_outer_var_ref (var, ctx);
6100 if (is_simd)
6102 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6103 && gimple_omp_for_combined_into_p (ctx->stmt))
6105 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6106 tree stept = TREE_TYPE (t);
6107 tree ct = omp_find_clause (clauses,
6108 OMP_CLAUSE__LOOPTEMP_);
6109 gcc_assert (ct);
6110 tree l = OMP_CLAUSE_DECL (ct);
6111 tree n1 = fd->loop.n1;
6112 tree step = fd->loop.step;
6113 tree itype = TREE_TYPE (l);
6114 if (POINTER_TYPE_P (itype))
6115 itype = signed_type_for (itype);
6116 l = fold_build2 (MINUS_EXPR, itype, l, n1);
6117 if (TYPE_UNSIGNED (itype)
6118 && fd->loop.cond_code == GT_EXPR)
6119 l = fold_build2 (TRUNC_DIV_EXPR, itype,
6120 fold_build1 (NEGATE_EXPR, itype, l),
6121 fold_build1 (NEGATE_EXPR,
6122 itype, step));
6123 else
6124 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6125 t = fold_build2 (MULT_EXPR, stept,
6126 fold_convert (stept, l), t);
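/* At this point T = ((L - N1) / STEP) * LINEAR_STEP, i.e. the linear
   offset corresponding to the first iteration handled by this
   constituent of the combined construct.  */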
6128 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6130 if (omp_privatize_by_reference (var))
6132 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6133 tree new_vard = TREE_OPERAND (new_var, 0);
6134 gcc_assert (DECL_P (new_vard));
6135 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6136 nx = TYPE_SIZE_UNIT (type);
6137 if (TREE_CONSTANT (nx))
6139 nx = create_tmp_var_raw (type,
6140 get_name (var));
6141 gimple_add_tmp_var (nx);
6142 TREE_ADDRESSABLE (nx) = 1;
6143 nx = build_fold_addr_expr_loc (clause_loc,
6144 nx);
6145 nx = fold_convert_loc (clause_loc,
6146 TREE_TYPE (new_vard),
6147 nx);
6148 gimplify_assign (new_vard, nx, ilist);
6152 x = lang_hooks.decls.omp_clause_linear_ctor
6153 (c, new_var, x, t);
6154 gimplify_and_add (x, ilist);
6155 goto do_dtor;
6158 if (POINTER_TYPE_P (TREE_TYPE (x)))
6159 x = fold_build2 (POINTER_PLUS_EXPR,
6160 TREE_TYPE (x), x, t);
6161 else
6162 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
6165 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6166 || TREE_ADDRESSABLE (new_var)
6167 || omp_privatize_by_reference (var))
6168 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6169 ivar, lvar))
6171 if (omp_privatize_by_reference (var))
6173 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6174 tree new_vard = TREE_OPERAND (new_var, 0);
6175 gcc_assert (DECL_P (new_vard));
6176 SET_DECL_VALUE_EXPR (new_vard,
6177 build_fold_addr_expr (lvar));
6178 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6180 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6182 tree iv = create_tmp_var (TREE_TYPE (new_var));
6183 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6184 gimplify_and_add (x, ilist);
6185 gimple_stmt_iterator gsi
6186 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6187 gassign *g
6188 = gimple_build_assign (unshare_expr (lvar), iv);
6189 gsi_insert_before_without_update (&gsi, g,
6190 GSI_SAME_STMT);
6191 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6192 enum tree_code code = PLUS_EXPR;
6193 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6194 code = POINTER_PLUS_EXPR;
6195 g = gimple_build_assign (iv, code, iv, t);
6196 gsi_insert_before_without_update (&gsi, g,
6197 GSI_SAME_STMT);
6198 break;
6200 x = lang_hooks.decls.omp_clause_copy_ctor
6201 (c, unshare_expr (ivar), x);
6202 gimplify_and_add (x, &llist[0]);
6203 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6204 if (x)
6205 gimplify_and_add (x, &llist[1]);
6206 break;
6208 if (omp_privatize_by_reference (var))
6210 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6211 tree new_vard = TREE_OPERAND (new_var, 0);
6212 gcc_assert (DECL_P (new_vard));
6213 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6214 nx = TYPE_SIZE_UNIT (type);
6215 if (TREE_CONSTANT (nx))
6217 nx = create_tmp_var_raw (type, get_name (var));
6218 gimple_add_tmp_var (nx);
6219 TREE_ADDRESSABLE (nx) = 1;
6220 nx = build_fold_addr_expr_loc (clause_loc, nx);
6221 nx = fold_convert_loc (clause_loc,
6222 TREE_TYPE (new_vard), nx);
6223 gimplify_assign (new_vard, nx, ilist);
6227 x = lang_hooks.decls.omp_clause_copy_ctor
6228 (c, unshare_expr (new_var), x);
6229 gimplify_and_add (x, ilist);
6230 goto do_dtor;
6232 case OMP_CLAUSE__LOOPTEMP_:
6233 case OMP_CLAUSE__REDUCTEMP_:
6234 gcc_assert (is_taskreg_ctx (ctx));
6235 x = build_outer_var_ref (var, ctx);
6236 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6237 gimplify_and_add (x, ilist);
6238 break;
6240 case OMP_CLAUSE_COPYIN:
6241 by_ref = use_pointer_for_field (var, NULL);
6242 x = build_receiver_ref (var, by_ref, ctx);
6243 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6244 append_to_statement_list (x, &copyin_seq);
6245 copyin_by_ref |= by_ref;
6246 break;
6248 case OMP_CLAUSE_REDUCTION:
6249 case OMP_CLAUSE_IN_REDUCTION:
6250 /* OpenACC reductions are initialized using the
6251 GOACC_REDUCTION internal function. */
6252 if (is_gimple_omp_oacc (ctx->stmt))
6253 break;
6254 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6256 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6257 gimple *tseq;
6258 tree ptype = TREE_TYPE (placeholder);
6259 if (cond)
6261 x = error_mark_node;
6262 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6263 && !task_reduction_needs_orig_p)
6264 x = var;
6265 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6267 tree pptype = build_pointer_type (ptype);
6268 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6269 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6270 size_int (task_reduction_cnt_full
6271 + task_reduction_cntorig - 1),
6272 NULL_TREE, NULL_TREE);
6273 else
6275 unsigned int idx
6276 = *ctx->task_reduction_map->get (c);
6277 x = task_reduction_read (ilist, tskred_temp,
6278 pptype, 7 + 3 * idx);
6280 x = fold_convert (pptype, x);
6281 x = build_simple_mem_ref (x);
6284 else
6286 lower_private_allocate (var, new_var, allocator,
6287 allocate_ptr, ilist, ctx, false,
6288 NULL_TREE);
6289 x = build_outer_var_ref (var, ctx);
6291 if (omp_privatize_by_reference (var)
6292 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6293 x = build_fold_addr_expr_loc (clause_loc, x);
6295 SET_DECL_VALUE_EXPR (placeholder, x);
6296 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6297 tree new_vard = new_var;
6298 if (omp_privatize_by_reference (var))
6300 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6301 new_vard = TREE_OPERAND (new_var, 0);
6302 gcc_assert (DECL_P (new_vard));
6304 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6305 if (is_simd
6306 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6307 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6308 rvarp = &rvar;
6309 if (is_simd
6310 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6311 ivar, lvar, rvarp,
6312 &rvar2))
6314 if (new_vard == new_var)
6316 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6317 SET_DECL_VALUE_EXPR (new_var, ivar);
6319 else
6321 SET_DECL_VALUE_EXPR (new_vard,
6322 build_fold_addr_expr (ivar));
6323 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6325 x = lang_hooks.decls.omp_clause_default_ctor
6326 (c, unshare_expr (ivar),
6327 build_outer_var_ref (var, ctx));
6328 if (rvarp && ctx->for_simd_scan_phase)
6330 if (x)
6331 gimplify_and_add (x, &llist[0]);
6332 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6333 if (x)
6334 gimplify_and_add (x, &llist[1]);
6335 break;
6337 else if (rvarp)
6339 if (x)
6341 gimplify_and_add (x, &llist[0]);
6343 tree ivar2 = unshare_expr (lvar);
6344 TREE_OPERAND (ivar2, 1) = sctx.idx;
6345 x = lang_hooks.decls.omp_clause_default_ctor
6346 (c, ivar2, build_outer_var_ref (var, ctx));
6347 gimplify_and_add (x, &llist[0]);
6349 if (rvar2)
6351 x = lang_hooks.decls.omp_clause_default_ctor
6352 (c, unshare_expr (rvar2),
6353 build_outer_var_ref (var, ctx));
6354 gimplify_and_add (x, &llist[0]);
6357 /* For types that need construction, add another
6358 private var which will be default constructed
6359 and optionally initialized with
6360 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, because in the
6361 loop we want to assign this value instead of
6362 constructing and destructing it in each
6363 iteration. */
6364 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6365 gimple_add_tmp_var (nv);
6366 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6367 ? rvar2
6368 : ivar, 0),
6369 nv);
6370 x = lang_hooks.decls.omp_clause_default_ctor
6371 (c, nv, build_outer_var_ref (var, ctx));
6372 gimplify_and_add (x, ilist);
6374 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6376 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6377 x = DECL_VALUE_EXPR (new_vard);
6378 tree vexpr = nv;
6379 if (new_vard != new_var)
6380 vexpr = build_fold_addr_expr (nv);
6381 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6382 lower_omp (&tseq, ctx);
6383 SET_DECL_VALUE_EXPR (new_vard, x);
6384 gimple_seq_add_seq (ilist, tseq);
6385 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6388 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6389 if (x)
6390 gimplify_and_add (x, dlist);
6393 tree ref = build_outer_var_ref (var, ctx);
6394 x = unshare_expr (ivar);
6395 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6396 ref);
6397 gimplify_and_add (x, &llist[0]);
6399 ref = build_outer_var_ref (var, ctx);
6400 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6401 rvar);
6402 gimplify_and_add (x, &llist[3]);
6404 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6405 if (new_vard == new_var)
6406 SET_DECL_VALUE_EXPR (new_var, lvar);
6407 else
6408 SET_DECL_VALUE_EXPR (new_vard,
6409 build_fold_addr_expr (lvar));
6411 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6412 if (x)
6413 gimplify_and_add (x, &llist[1]);
6415 tree ivar2 = unshare_expr (lvar);
6416 TREE_OPERAND (ivar2, 1) = sctx.idx;
6417 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6418 if (x)
6419 gimplify_and_add (x, &llist[1]);
6421 if (rvar2)
6423 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6424 if (x)
6425 gimplify_and_add (x, &llist[1]);
6427 break;
6429 if (x)
6430 gimplify_and_add (x, &llist[0]);
6431 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6433 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6434 lower_omp (&tseq, ctx);
6435 gimple_seq_add_seq (&llist[0], tseq);
6437 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6438 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6439 lower_omp (&tseq, ctx);
6440 gimple_seq_add_seq (&llist[1], tseq);
6441 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6442 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6443 if (new_vard == new_var)
6444 SET_DECL_VALUE_EXPR (new_var, lvar);
6445 else
6446 SET_DECL_VALUE_EXPR (new_vard,
6447 build_fold_addr_expr (lvar));
6448 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6449 if (x)
6450 gimplify_and_add (x, &llist[1]);
6451 break;
6453 /* If this is a reference to a constant-size reduction var
6454 with a placeholder, we haven't emitted its initializer
6455 yet because that is undesirable if SIMD arrays are used.
6456 But if they aren't used, we need to emit the deferred
6457 initialization now. */
6458 else if (omp_privatize_by_reference (var) && is_simd)
6459 handle_simd_reference (clause_loc, new_vard, ilist);
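/* When a task-reduction tracking flag COND exists, mark the copy as
   ever used (*COND = true); outside parallel contexts this is done
   lazily, branching to LAB2 past the construction below if *COND
   was already set.  */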
6461 tree lab2 = NULL_TREE;
6462 if (cond)
6464 gimple *g;
6465 if (!is_parallel_ctx (ctx))
6467 tree condv = create_tmp_var (boolean_type_node);
6468 tree m = build_simple_mem_ref (cond);
6469 g = gimple_build_assign (condv, m);
6470 gimple_seq_add_stmt (ilist, g);
6471 tree lab1
6472 = create_artificial_label (UNKNOWN_LOCATION);
6473 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6474 g = gimple_build_cond (NE_EXPR, condv,
6475 boolean_false_node,
6476 lab2, lab1);
6477 gimple_seq_add_stmt (ilist, g);
6478 gimple_seq_add_stmt (ilist,
6479 gimple_build_label (lab1));
6481 g = gimple_build_assign (build_simple_mem_ref (cond),
6482 boolean_true_node);
6483 gimple_seq_add_stmt (ilist, g);
6485 x = lang_hooks.decls.omp_clause_default_ctor
6486 (c, unshare_expr (new_var),
6487 cond ? NULL_TREE
6488 : build_outer_var_ref (var, ctx));
6489 if (x)
6490 gimplify_and_add (x, ilist);
6492 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6493 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6495 if (ctx->for_simd_scan_phase)
6496 goto do_dtor;
6497 if (x || (!is_simd
6498 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6500 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6501 gimple_add_tmp_var (nv);
6502 ctx->cb.decl_map->put (new_vard, nv);
6503 x = lang_hooks.decls.omp_clause_default_ctor
6504 (c, nv, build_outer_var_ref (var, ctx));
6505 if (x)
6506 gimplify_and_add (x, ilist);
6507 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6509 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6510 tree vexpr = nv;
6511 if (new_vard != new_var)
6512 vexpr = build_fold_addr_expr (nv);
6513 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6514 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6515 lower_omp (&tseq, ctx);
6516 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6517 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6518 gimple_seq_add_seq (ilist, tseq);
6520 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6521 if (is_simd && ctx->scan_exclusive)
6523 tree nv2
6524 = create_tmp_var_raw (TREE_TYPE (new_var));
6525 gimple_add_tmp_var (nv2);
6526 ctx->cb.decl_map->put (nv, nv2);
6527 x = lang_hooks.decls.omp_clause_default_ctor
6528 (c, nv2, build_outer_var_ref (var, ctx));
6529 gimplify_and_add (x, ilist);
6530 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6531 if (x)
6532 gimplify_and_add (x, dlist);
6534 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6535 if (x)
6536 gimplify_and_add (x, dlist);
6538 else if (is_simd
6539 && ctx->scan_exclusive
6540 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6542 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6543 gimple_add_tmp_var (nv2);
6544 ctx->cb.decl_map->put (new_vard, nv2);
6545 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6546 if (x)
6547 gimplify_and_add (x, dlist);
6549 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6550 goto do_dtor;
6553 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6555 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6556 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6557 && is_omp_target (ctx->stmt))
6559 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6560 tree oldv = NULL_TREE;
6561 gcc_assert (d);
6562 if (DECL_HAS_VALUE_EXPR_P (d))
6563 oldv = DECL_VALUE_EXPR (d);
6564 SET_DECL_VALUE_EXPR (d, new_vard);
6565 DECL_HAS_VALUE_EXPR_P (d) = 1;
6566 lower_omp (&tseq, ctx);
6567 if (oldv)
6568 SET_DECL_VALUE_EXPR (d, oldv);
6569 else
6571 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6572 DECL_HAS_VALUE_EXPR_P (d) = 0;
6575 else
6576 lower_omp (&tseq, ctx);
6577 gimple_seq_add_seq (ilist, tseq);
6579 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6580 if (is_simd)
6582 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6583 lower_omp (&tseq, ctx);
6584 gimple_seq_add_seq (dlist, tseq);
6585 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6587 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6588 if (cond)
6590 if (lab2)
6591 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6592 break;
6594 goto do_dtor;
6596 else
6598 x = omp_reduction_init (c, TREE_TYPE (new_var));
6599 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6600 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6602 if (cond)
6604 gimple *g;
6605 tree lab2 = NULL_TREE;
6606 /* GOMP_taskgroup_reduction_register memsets the whole
6607 array to zero. If the initializer is zero, we don't
6608 need to initialize it again, just mark it as ever
6609 used unconditionally, i.e. cond = true. */
6610 if (initializer_zerop (x))
6612 g = gimple_build_assign (build_simple_mem_ref (cond),
6613 boolean_true_node);
6614 gimple_seq_add_stmt (ilist, g);
6615 break;
6618 /* Otherwise, emit
6619 if (!cond) { cond = true; new_var = x; } */
6620 if (!is_parallel_ctx (ctx))
6622 tree condv = create_tmp_var (boolean_type_node);
6623 tree m = build_simple_mem_ref (cond);
6624 g = gimple_build_assign (condv, m);
6625 gimple_seq_add_stmt (ilist, g);
6626 tree lab1
6627 = create_artificial_label (UNKNOWN_LOCATION);
6628 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6629 g = gimple_build_cond (NE_EXPR, condv,
6630 boolean_false_node,
6631 lab2, lab1);
6632 gimple_seq_add_stmt (ilist, g);
6633 gimple_seq_add_stmt (ilist,
6634 gimple_build_label (lab1));
6636 g = gimple_build_assign (build_simple_mem_ref (cond),
6637 boolean_true_node);
6638 gimple_seq_add_stmt (ilist, g);
6639 gimplify_assign (new_var, x, ilist);
6640 if (lab2)
6641 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6642 break;
6645 /* reduction(-:var) sums up the partial results, so it
6646 acts identically to reduction(+:var). */
6647 if (code == MINUS_EXPR)
6648 code = PLUS_EXPR;
6650 bool is_truth_op
6651 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6652 tree new_vard = new_var;
6653 if (is_simd && omp_privatize_by_reference (var))
6655 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6656 new_vard = TREE_OPERAND (new_var, 0);
6657 gcc_assert (DECL_P (new_vard));
6659 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6660 if (is_simd
6661 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6662 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6663 rvarp = &rvar;
6664 if (is_simd
6665 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6666 ivar, lvar, rvarp,
6667 &rvar2))
6669 if (new_vard != new_var)
6671 SET_DECL_VALUE_EXPR (new_vard,
6672 build_fold_addr_expr (lvar));
6673 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6676 tree ref = build_outer_var_ref (var, ctx);
6678 if (rvarp)
6680 if (ctx->for_simd_scan_phase)
6681 break;
6682 gimplify_assign (ivar, ref, &llist[0]);
6683 ref = build_outer_var_ref (var, ctx);
6684 gimplify_assign (ref, rvar, &llist[3]);
6685 break;
6688 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6690 if (sctx.is_simt)
6692 if (!simt_lane)
6693 simt_lane = create_tmp_var (unsigned_type_node);
6694 x = build_call_expr_internal_loc
6695 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6696 TREE_TYPE (ivar), 2, ivar, simt_lane);
6697 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6698 gimplify_assign (ivar, x, &llist[2]);
6700 tree ivar2 = ivar;
6701 tree ref2 = ref;
6702 if (is_truth_op)
6704 tree zero = build_zero_cst (TREE_TYPE (ivar));
6705 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6706 boolean_type_node, ivar,
6707 zero);
6708 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6709 boolean_type_node, ref,
6710 zero);
6712 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6713 if (is_truth_op)
6714 x = fold_convert (TREE_TYPE (ref), x);
6715 ref = build_outer_var_ref (var, ctx);
6716 gimplify_assign (ref, x, &llist[1]);
6719 else
6721 lower_private_allocate (var, new_var, allocator,
6722 allocate_ptr, ilist, ctx,
6723 false, NULL_TREE);
6724 if (omp_privatize_by_reference (var) && is_simd)
6725 handle_simd_reference (clause_loc, new_vard, ilist);
6726 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6727 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6728 break;
6729 gimplify_assign (new_var, x, ilist);
6730 if (is_simd)
6732 tree ref = build_outer_var_ref (var, ctx);
6733 tree new_var2 = new_var;
6734 tree ref2 = ref;
6735 if (is_truth_op)
6737 tree zero = build_zero_cst (TREE_TYPE (new_var));
6738 new_var2
6739 = fold_build2_loc (clause_loc, NE_EXPR,
6740 boolean_type_node, new_var,
6741 zero);
6742 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6743 boolean_type_node, ref,
6744 zero);
6746 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6747 if (is_truth_op)
6748 x = fold_convert (TREE_TYPE (new_var), x);
6749 ref = build_outer_var_ref (var, ctx);
6750 gimplify_assign (ref, x, dlist);
6752 if (allocator)
6753 goto do_dtor;
6756 break;
6758 default:
6759 gcc_unreachable ();
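/* The task-reduction address array is dead once the clauses above
   have been processed; clobber it so later passes may reuse its
   stack slot.  */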
6763 if (tskred_avar)
6765 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6766 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6769 if (known_eq (sctx.max_vf, 1U))
6771 sctx.is_simt = false;
6772 if (ctx->lastprivate_conditional_map)
6774 if (gimple_omp_for_combined_into_p (ctx->stmt))
6776 /* Signal to lower_omp_1 that it should use parent context. */
6777 ctx->combined_into_simd_safelen1 = true;
6778 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6779 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6780 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6782 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6783 omp_context *outer = ctx->outer;
6784 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6785 outer = outer->outer;
6786 tree *v = ctx->lastprivate_conditional_map->get (o);
6787 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6788 tree *pv = outer->lastprivate_conditional_map->get (po);
6789 *v = *pv;
6792 else
6794 /* When not vectorized, treat lastprivate(conditional:) like
6795 normal lastprivate, as there will be just one simd lane
6796 writing the privatized variable. */
6797 delete ctx->lastprivate_conditional_map;
6798 ctx->lastprivate_conditional_map = NULL;
6803 if (nonconst_simd_if)
6805 if (sctx.lane == NULL_TREE)
6807 sctx.idx = create_tmp_var (unsigned_type_node);
6808 sctx.lane = create_tmp_var (unsigned_type_node);
6810 /* FIXME: For now. */
6811 sctx.is_simt = false;
6814 if (sctx.lane || sctx.is_simt)
6816 uid = create_tmp_var (ptr_type_node, "simduid");
6817 /* We don't want uninitialized warnings on simduid; it is always
6818 uninitialized, since we use it only for its DECL_UID, not for its value. */
6819 suppress_warning (uid, OPT_Wuninitialized);
6820 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6821 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6822 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6823 gimple_omp_for_set_clauses (ctx->stmt, c);
6825 /* Emit calls denoting privatized variables and initializing a pointer to
6826 the structure that holds private variables as fields, after the ompdevlow pass. */
6827 if (sctx.is_simt)
6829 sctx.simt_eargs[0] = uid;
6830 gimple *g
6831 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6832 gimple_call_set_lhs (g, uid);
6833 gimple_seq_add_stmt (ilist, g);
6834 sctx.simt_eargs.release ();
6836 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6837 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6838 gimple_call_set_lhs (g, simtrec);
6839 gimple_seq_add_stmt (ilist, g);
6841 if (sctx.lane)
6843 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6844 2 + (nonconst_simd_if != NULL),
6845 uid, integer_zero_node,
6846 nonconst_simd_if);
6847 gimple_call_set_lhs (g, sctx.lane);
6848 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6849 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6850 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6851 build_int_cst (unsigned_type_node, 0));
6852 gimple_seq_add_stmt (ilist, g);
6853 if (sctx.lastlane)
6855 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6856 2, uid, sctx.lane);
6857 gimple_call_set_lhs (g, sctx.lastlane);
6858 gimple_seq_add_stmt (dlist, g);
6859 gimple_seq_add_seq (dlist, llist[3]);
6861 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
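/* I.e. for (simt_lane = 1; simt_lane < simt_vf; simt_lane <<= 1)
     <llist[2]: IFN_GOMP_SIMT_XCHG_BFLY reduction statements>;  */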
6862 if (llist[2])
6864 tree simt_vf = create_tmp_var (unsigned_type_node);
6865 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6866 gimple_call_set_lhs (g, simt_vf);
6867 gimple_seq_add_stmt (dlist, g);
6869 tree t = build_int_cst (unsigned_type_node, 1);
6870 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6871 gimple_seq_add_stmt (dlist, g);
6873 t = build_int_cst (unsigned_type_node, 0);
6874 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6875 gimple_seq_add_stmt (dlist, g);
6877 tree body = create_artificial_label (UNKNOWN_LOCATION);
6878 tree header = create_artificial_label (UNKNOWN_LOCATION);
6879 tree end = create_artificial_label (UNKNOWN_LOCATION);
6880 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6881 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6883 gimple_seq_add_seq (dlist, llist[2]);
6885 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6886 gimple_seq_add_stmt (dlist, g);
6888 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6889 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6890 gimple_seq_add_stmt (dlist, g);
6892 gimple_seq_add_stmt (dlist, gimple_build_label (end));
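/* Similarly wrap the per-lane constructor sequence llist[0] (into
   ILIST) and destructor sequence llist[1] (into DLIST) in
   "for (sctx.idx = 0; sctx.idx < vf; sctx.idx++)" loops.  */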
6894 for (int i = 0; i < 2; i++)
6895 if (llist[i])
6897 tree vf = create_tmp_var (unsigned_type_node);
6898 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6899 gimple_call_set_lhs (g, vf);
6900 gimple_seq *seq = i == 0 ? ilist : dlist;
6901 gimple_seq_add_stmt (seq, g);
6902 tree t = build_int_cst (unsigned_type_node, 0);
6903 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6904 gimple_seq_add_stmt (seq, g);
6905 tree body = create_artificial_label (UNKNOWN_LOCATION);
6906 tree header = create_artificial_label (UNKNOWN_LOCATION);
6907 tree end = create_artificial_label (UNKNOWN_LOCATION);
6908 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6909 gimple_seq_add_stmt (seq, gimple_build_label (body));
6910 gimple_seq_add_seq (seq, llist[i]);
6911 t = build_int_cst (unsigned_type_node, 1);
6912 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6913 gimple_seq_add_stmt (seq, g);
6914 gimple_seq_add_stmt (seq, gimple_build_label (header));
6915 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6916 gimple_seq_add_stmt (seq, g);
6917 gimple_seq_add_stmt (seq, gimple_build_label (end));
6920 if (sctx.is_simt)
6922 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6923 gimple *g
6924 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6925 gimple_seq_add_stmt (dlist, g);
6928 /* The copyin sequence is not to be executed by the main thread, since
6929 that would result in self-copies. This might not be visible for scalars,
6930 but it certainly is for C++ operator=. */
6931 if (copyin_seq)
6933 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM), 0);
6935 x = build2 (NE_EXPR, boolean_type_node, x,
6936 build_int_cst (TREE_TYPE (x), 0));
6937 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6938 gimplify_and_add (x, ilist);
6941 /* If any copyin variable is passed by reference, we must ensure the
6942 master thread doesn't modify it before it is copied over in all
6943 threads. Similarly for variables in both firstprivate and
6944 lastprivate clauses we need to ensure the lastprivate copying
6945 happens after firstprivate copying in all threads. And similarly
6946 for UDRs if the initializer expression refers to omp_orig. */
6947 if (copyin_by_ref || lastprivate_firstprivate
6948 || (reduction_omp_orig_ref
6949 && !ctx->scan_inclusive
6950 && !ctx->scan_exclusive))
6952 /* Don't add any barrier for #pragma omp simd or
6953 #pragma omp distribute. */
6954 if (!is_task_ctx (ctx)
6955 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6956 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6957 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6960 /* If max_vf is non-zero, then we can use only a vectorization factor
6961 up to the max_vf we chose. So stick it into the safelen clause. */
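/* E.g. a user-written safelen(16) combined with a chosen max_vf of 8
   gets a new safelen(8) clause prepended, which then takes effect.  */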
6962 if (maybe_ne (sctx.max_vf, 0U))
6964 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6965 OMP_CLAUSE_SAFELEN);
6966 poly_uint64 safe_len;
6967 if (c == NULL_TREE
6968 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6969 && maybe_gt (safe_len, sctx.max_vf)))
6971 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6972 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6973 sctx.max_vf);
6974 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6975 gimple_omp_for_set_clauses (ctx->stmt, c);
6980 /* Create temporary variables for lastprivate(conditional:) implementation
6981 in context CTX with CLAUSES. */
6983 static void
6984 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6986 tree iter_type = NULL_TREE;
6987 tree cond_ptr = NULL_TREE;
6988 tree iter_var = NULL_TREE;
6989 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6990 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6991 tree next = *clauses;
6992 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6993 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6994 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6996 if (is_simd)
6998 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6999 gcc_assert (cc);
7000 if (iter_type == NULL_TREE)
7002 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
7003 iter_var = create_tmp_var_raw (iter_type);
7004 DECL_CONTEXT (iter_var) = current_function_decl;
7005 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7006 DECL_CHAIN (iter_var) = ctx->block_vars;
7007 ctx->block_vars = iter_var;
7008 tree c3
7009 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7010 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7011 OMP_CLAUSE_DECL (c3) = iter_var;
7012 OMP_CLAUSE_CHAIN (c3) = *clauses;
7013 *clauses = c3;
7014 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7016 next = OMP_CLAUSE_CHAIN (cc);
7017 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7018 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
7019 ctx->lastprivate_conditional_map->put (o, v);
7020 continue;
7022 if (iter_type == NULL)
7024 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
7026 struct omp_for_data fd;
7027 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
7028 NULL);
7029 iter_type = unsigned_type_for (fd.iter_type);
7031 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
7032 iter_type = unsigned_type_node;
7033 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
7034 if (c2)
7036 cond_ptr
7037 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
7038 OMP_CLAUSE_DECL (c2) = cond_ptr;
7040 else
7042 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
7043 DECL_CONTEXT (cond_ptr) = current_function_decl;
7044 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
7045 DECL_CHAIN (cond_ptr) = ctx->block_vars;
7046 ctx->block_vars = cond_ptr;
7047 c2 = build_omp_clause (UNKNOWN_LOCATION,
7048 OMP_CLAUSE__CONDTEMP_);
7049 OMP_CLAUSE_DECL (c2) = cond_ptr;
7050 OMP_CLAUSE_CHAIN (c2) = *clauses;
7051 *clauses = c2;
7053 iter_var = create_tmp_var_raw (iter_type);
7054 DECL_CONTEXT (iter_var) = current_function_decl;
7055 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7056 DECL_CHAIN (iter_var) = ctx->block_vars;
7057 ctx->block_vars = iter_var;
7058 tree c3
7059 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7060 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7061 OMP_CLAUSE_DECL (c3) = iter_var;
7062 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
7063 OMP_CLAUSE_CHAIN (c2) = c3;
7064 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7066 tree v = create_tmp_var_raw (iter_type);
7067 DECL_CONTEXT (v) = current_function_decl;
7068 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
7069 DECL_CHAIN (v) = ctx->block_vars;
7070 ctx->block_vars = v;
7071 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7072 ctx->lastprivate_conditional_map->put (o, v);
7077 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7078 both parallel and workshare constructs. PREDICATE may be NULL if it's
7079 always true. BODY_P is the sequence into which any needed early
7080 initialization is inserted, STMT_LIST receives the non-conditional
7081 lastprivate handling, and CSTMT_LIST is a sequence that needs to be run
7082 in a critical section. */
7084 static void
7085 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
7086 gimple_seq *stmt_list, gimple_seq *cstmt_list,
7087 omp_context *ctx)
7089 tree x, c, label = NULL, orig_clauses = clauses;
7090 bool par_clauses = false;
7091 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
7092 unsigned HOST_WIDE_INT conditional_off = 0;
7093 gimple_seq post_stmt_list = NULL;
7095 /* Early exit if there are no lastprivate or linear clauses. */
7096 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
7097 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
7098 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
7099 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
7100 break;
7101 if (clauses == NULL)
7103 /* If this was a workshare clause, see if it had been combined
7104 with its parallel. In that case, look for the clauses on the
7105 parallel statement itself. */
7106 if (is_parallel_ctx (ctx))
7107 return;
7109 ctx = ctx->outer;
7110 if (ctx == NULL || !is_parallel_ctx (ctx))
7111 return;
7113 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7114 OMP_CLAUSE_LASTPRIVATE);
7115 if (clauses == NULL)
7116 return;
7117 par_clauses = true;
7120 bool maybe_simt = false;
7121 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7122 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7124 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
7125 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
7126 if (simduid)
7127 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
7130 if (predicate)
7132 gcond *stmt;
7133 tree label_true, arm1, arm2;
7134 enum tree_code pred_code = TREE_CODE (predicate);
7136 label = create_artificial_label (UNKNOWN_LOCATION);
7137 label_true = create_artificial_label (UNKNOWN_LOCATION);
7138 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
7140 arm1 = TREE_OPERAND (predicate, 0);
7141 arm2 = TREE_OPERAND (predicate, 1);
7142 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7143 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
7145 else
7147 arm1 = predicate;
7148 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7149 arm2 = boolean_false_node;
7150 pred_code = NE_EXPR;
7152 if (maybe_simt)
7154 c = build2 (pred_code, boolean_type_node, arm1, arm2);
7155 c = fold_convert (integer_type_node, c);
7156 simtcond = create_tmp_var (integer_type_node);
7157 gimplify_assign (simtcond, c, stmt_list);
7158 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
7159 1, simtcond);
7160 c = create_tmp_var (integer_type_node);
7161 gimple_call_set_lhs (g, c);
7162 gimple_seq_add_stmt (stmt_list, g);
7163 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
7164 label_true, label);
7166 else
7167 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
7168 gimple_seq_add_stmt (stmt_list, stmt);
7169 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
7172 tree cond_ptr = NULL_TREE;
7173 for (c = clauses; c ;)
7175 tree var, new_var;
7176 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7177 gimple_seq *this_stmt_list = stmt_list;
7178 tree lab2 = NULL_TREE;
7180 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7181 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7182 && ctx->lastprivate_conditional_map
7183 && !ctx->combined_into_simd_safelen1)
7185 gcc_assert (body_p);
7186 if (simduid)
7187 goto next;
7188 if (cond_ptr == NULL_TREE)
7190 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
7191 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
7193 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
7194 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7195 tree v = *ctx->lastprivate_conditional_map->get (o);
7196 gimplify_assign (v, build_zero_cst (type), body_p);
7197 this_stmt_list = cstmt_list;
7198 tree mem;
7199 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
7201 mem = build2 (MEM_REF, type, cond_ptr,
7202 build_int_cst (TREE_TYPE (cond_ptr),
7203 conditional_off));
7204 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
7206 else
7207 mem = build4 (ARRAY_REF, type, cond_ptr,
7208 size_int (conditional_off++), NULL_TREE, NULL_TREE);
7209 tree mem2 = copy_node (mem);
7210 gimple_seq seq = NULL;
7211 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
7212 gimple_seq_add_seq (this_stmt_list, seq);
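/* Only a thread whose private counter V exceeds the value already
   recorded in MEM updates the record and performs the lastprivate
   store below; otherwise control jumps straight to LAB2.  */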
7213 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7214 lab2 = create_artificial_label (UNKNOWN_LOCATION);
7215 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
7216 gimple_seq_add_stmt (this_stmt_list, g);
7217 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
7218 gimplify_assign (mem2, v, this_stmt_list);
7220 else if (predicate
7221 && ctx->combined_into_simd_safelen1
7222 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7223 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7224 && ctx->lastprivate_conditional_map)
7225 this_stmt_list = &post_stmt_list;
7227 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7228 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7229 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7231 var = OMP_CLAUSE_DECL (c);
7232 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7233 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7234 && is_taskloop_ctx (ctx))
7236 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
7237 new_var = lookup_decl (var, ctx->outer);
7239 else
7241 new_var = lookup_decl (var, ctx);
7242 /* Avoid uninitialized warnings for lastprivate and
7243 for linear iterators. */
7244 if (predicate
7245 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7246 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
7247 suppress_warning (new_var, OPT_Wuninitialized);
7250 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
7252 tree val = DECL_VALUE_EXPR (new_var);
7253 if (TREE_CODE (val) == ARRAY_REF
7254 && VAR_P (TREE_OPERAND (val, 0))
7255 && lookup_attribute ("omp simd array",
7256 DECL_ATTRIBUTES (TREE_OPERAND (val,
7257 0))))
7259 if (lastlane == NULL)
7261 lastlane = create_tmp_var (unsigned_type_node);
7262 gcall *g
7263 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
7264 2, simduid,
7265 TREE_OPERAND (val, 1));
7266 gimple_call_set_lhs (g, lastlane);
7267 gimple_seq_add_stmt (this_stmt_list, g);
7269 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
7270 TREE_OPERAND (val, 0), lastlane,
7271 NULL_TREE, NULL_TREE);
7272 TREE_THIS_NOTRAP (new_var) = 1;
7275 else if (maybe_simt)
7277 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7278 ? DECL_VALUE_EXPR (new_var)
7279 : new_var);
7280 if (simtlast == NULL)
7282 simtlast = create_tmp_var (unsigned_type_node);
7283 gcall *g = gimple_build_call_internal
7284 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7285 gimple_call_set_lhs (g, simtlast);
7286 gimple_seq_add_stmt (this_stmt_list, g);
7288 x = build_call_expr_internal_loc
7289 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7290 TREE_TYPE (val), 2, val, simtlast);
7291 new_var = unshare_expr (new_var);
7292 gimplify_assign (new_var, x, this_stmt_list);
7293 new_var = unshare_expr (new_var);
7296 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7297 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7299 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7300 gimple_seq_add_seq (this_stmt_list,
7301 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7302 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7304 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7305 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7307 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7308 gimple_seq_add_seq (this_stmt_list,
7309 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7310 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
7313 x = NULL_TREE;
7314 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7315 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7316 && is_taskloop_ctx (ctx))
7318 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7319 ctx->outer->outer);
7320 if (is_global_var (ovar))
7321 x = ovar;
7323 if (!x)
7324 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7325 if (omp_privatize_by_reference (var))
7326 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7327 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7328 gimplify_and_add (x, this_stmt_list);
7330 if (lab2)
7331 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7334 next:
7335 c = OMP_CLAUSE_CHAIN (c);
7336 if (c == NULL && !par_clauses)
7338 /* If this was a workshare clause, see if it had been combined
7339 with its parallel. In that case, continue looking for the
7340 clauses also on the parallel statement itself. */
7341 if (is_parallel_ctx (ctx))
7342 break;
7344 ctx = ctx->outer;
7345 if (ctx == NULL || !is_parallel_ctx (ctx))
7346 break;
7348 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7349 OMP_CLAUSE_LASTPRIVATE);
7350 par_clauses = true;
7354 if (label)
7355 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7356 gimple_seq_add_seq (stmt_list, post_stmt_list);
7359 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7360 (which might be a placeholder). INNER is true if this is an inner
7361 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7362 join markers. Generate the before-loop forking sequence in
7363 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
7364 general form of these sequences is
7366 GOACC_REDUCTION_SETUP
7367 GOACC_FORK
7368 GOACC_REDUCTION_INIT
7370 GOACC_REDUCTION_FINI
7371 GOACC_JOIN
7372 GOACC_REDUCTION_TEARDOWN. */
7374 static void
7375 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7376 gcall *fork, gcall *private_marker, gcall *join,
7377 gimple_seq *fork_seq, gimple_seq *join_seq,
7378 omp_context *ctx)
7380 gimple_seq before_fork = NULL;
7381 gimple_seq after_fork = NULL;
7382 gimple_seq before_join = NULL;
7383 gimple_seq after_join = NULL;
7384 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7385 setup_code = NULL_TREE, teardown_code = NULL_TREE;
7386 unsigned offset = 0;
7388 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7389 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7391 /* No 'reduction' clauses on OpenACC 'kernels'. */
7392 gcc_checking_assert (!is_oacc_kernels (ctx));
7393 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7394 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7396 tree orig = OMP_CLAUSE_DECL (c);
7397 tree var = maybe_lookup_decl (orig, ctx);
7398 tree ref_to_res = NULL_TREE;
7399 tree incoming, outgoing, v1, v2, v3;
7400 bool is_private = false;
7402 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7403 if (rcode == MINUS_EXPR)
7404 rcode = PLUS_EXPR;
7405 else if (rcode == TRUTH_ANDIF_EXPR)
7406 rcode = BIT_AND_EXPR;
7407 else if (rcode == TRUTH_ORIF_EXPR)
7408 rcode = BIT_IOR_EXPR;
7409 tree op = build_int_cst (unsigned_type_node, rcode);
7411 if (!var)
7412 var = orig;
7414 incoming = outgoing = var;
7416 if (!inner)
7418 /* See if an outer construct also reduces this variable. */
7419 omp_context *outer = ctx;
7421 while (omp_context *probe = outer->outer)
7423 enum gimple_code type = gimple_code (probe->stmt);
7424 tree cls;
7426 switch (type)
7428 case GIMPLE_OMP_FOR:
7429 cls = gimple_omp_for_clauses (probe->stmt);
7430 break;
7432 case GIMPLE_OMP_TARGET:
7433 /* No 'reduction' clauses inside OpenACC 'kernels'
7434 regions. */
7435 gcc_checking_assert (!is_oacc_kernels (probe));
7437 if (!is_gimple_omp_offloaded (probe->stmt))
7438 goto do_lookup;
7440 cls = gimple_omp_target_clauses (probe->stmt);
7441 break;
7443 default:
7444 goto do_lookup;
7447 outer = probe;
7448 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7449 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7450 && orig == OMP_CLAUSE_DECL (cls))
7452 incoming = outgoing = lookup_decl (orig, probe);
7453 goto has_outer_reduction;
7455 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7456 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7457 && orig == OMP_CLAUSE_DECL (cls))
7459 is_private = true;
7460 goto do_lookup;
7464 do_lookup:
7465 /* This is the outermost construct with this reduction;
7466 see if there's a mapping for it. */
7467 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7468 && maybe_lookup_field (orig, outer) && !is_private)
7470 ref_to_res = build_receiver_ref (orig, false, outer);
7471 if (omp_privatize_by_reference (orig))
7472 ref_to_res = build_simple_mem_ref (ref_to_res);
7474 tree type = TREE_TYPE (var);
7475 if (POINTER_TYPE_P (type))
7476 type = TREE_TYPE (type);
7478 outgoing = var;
7479 incoming = omp_reduction_init_op (loc, rcode, type);
7481 else
7483 /* Try to look at enclosing contexts for the reduction var;
7484 use the original if no mapping is found. */
7485 tree t = NULL_TREE;
7486 omp_context *c = ctx->outer;
7487 while (c && !t)
7489 t = maybe_lookup_decl (orig, c);
7490 c = c->outer;
7492 incoming = outgoing = (t ? t : orig);
7495 has_outer_reduction:;
7498 if (!ref_to_res)
7499 ref_to_res = integer_zero_node;
7501 if (omp_privatize_by_reference (orig))
7503 tree type = TREE_TYPE (var);
7504 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7506 if (!inner)
7508 tree x = create_tmp_var (TREE_TYPE (type), id);
7509 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7512 v1 = create_tmp_var (type, id);
7513 v2 = create_tmp_var (type, id);
7514 v3 = create_tmp_var (type, id);
7516 gimplify_assign (v1, var, fork_seq);
7517 gimplify_assign (v2, var, fork_seq);
7518 gimplify_assign (v3, var, fork_seq);
7520 var = build_simple_mem_ref (var);
7521 v1 = build_simple_mem_ref (v1);
7522 v2 = build_simple_mem_ref (v2);
7523 v3 = build_simple_mem_ref (v3);
7524 outgoing = build_simple_mem_ref (outgoing);
7526 if (!TREE_CONSTANT (incoming))
7527 incoming = build_simple_mem_ref (incoming);
7529 else
7530 v1 = v2 = v3 = var;
7532 /* Determine position in reduction buffer, which may be used
7533 by target. The parser has ensured that this is not a
7534 variable-sized type. */
7535 fixed_size_mode mode
7536 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7537 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7538 offset = (offset + align - 1) & ~(align - 1);
7539 tree off = build_int_cst (sizetype, offset);
7540 offset += GET_MODE_SIZE (mode);
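/* A worked example of the round-up above: with a 4-byte int and the
   running OFFSET at 1, ALIGN 4 gives (1 + 3) & ~3 == 4, so the slot
   starts at byte 4 and OFFSET advances to 8 for the next variable.  */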
7542 if (!init_code)
7544 init_code = build_int_cst (integer_type_node,
7545 IFN_GOACC_REDUCTION_INIT);
7546 fini_code = build_int_cst (integer_type_node,
7547 IFN_GOACC_REDUCTION_FINI);
7548 setup_code = build_int_cst (integer_type_node,
7549 IFN_GOACC_REDUCTION_SETUP);
7550 teardown_code = build_int_cst (integer_type_node,
7551 IFN_GOACC_REDUCTION_TEARDOWN);
7554 tree setup_call
7555 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7556 TREE_TYPE (var), 6, setup_code,
7557 unshare_expr (ref_to_res),
7558 incoming, level, op, off);
7559 tree init_call
7560 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7561 TREE_TYPE (var), 6, init_code,
7562 unshare_expr (ref_to_res),
7563 v1, level, op, off);
7564 tree fini_call
7565 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7566 TREE_TYPE (var), 6, fini_code,
7567 unshare_expr (ref_to_res),
7568 v2, level, op, off);
7569 tree teardown_call
7570 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7571 TREE_TYPE (var), 6, teardown_code,
7572 ref_to_res, v3, level, op, off);
7574 gimplify_assign (v1, setup_call, &before_fork);
7575 gimplify_assign (v2, init_call, &after_fork);
7576 gimplify_assign (v3, fini_call, &before_join);
7577 gimplify_assign (outgoing, teardown_call, &after_join);
7580 /* Now stitch things together. */
7581 gimple_seq_add_seq (fork_seq, before_fork);
7582 if (private_marker)
7583 gimple_seq_add_stmt (fork_seq, private_marker);
7584 if (fork)
7585 gimple_seq_add_stmt (fork_seq, fork);
7586 gimple_seq_add_seq (fork_seq, after_fork);
7588 gimple_seq_add_seq (join_seq, before_join);
7589 if (join)
7590 gimple_seq_add_stmt (join_seq, join);
7591 gimple_seq_add_seq (join_seq, after_join);
7594 /* Generate code to implement the REDUCTION clauses and append it
7595 to STMT_SEQP. CLIST, if non-NULL, is a pointer to a sequence
7596 that should also be emitted inside the critical section; in
7597 that case clear *CLIST afterwards, otherwise leave it as is
7598 and let the caller emit it itself. */
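/* E.g. (a sketch, not literal GIMPLE), a lone "reduction(+:s)" becomes
   a relaxed atomic update of the outer variable,

     #pragma omp atomic update, relaxed
     s_outer = s_outer + s_priv;

   while two or more clauses are merged under one lock:

     GOMP_atomic_start ();
     s_outer = s_outer + s_priv;
     t_outer = t_outer * t_priv;
     GOMP_atomic_end ();  */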
7600 static void
7601 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7602 gimple_seq *clist, omp_context *ctx)
7604 gimple_seq sub_seq = NULL;
7605 gimple *stmt;
7606 tree x, c;
7607 int count = 0;
7609 /* OpenACC loop reductions are handled elsewhere. */
7610 if (is_gimple_omp_oacc (ctx->stmt))
7611 return;
7613 /* SIMD reductions are handled in lower_rec_input_clauses. */
7614 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7615 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7616 return;
7618 /* inscan reductions are handled elsewhere. */
7619 if (ctx->scan_inclusive || ctx->scan_exclusive)
7620 return;
7622 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7623 update in that case; otherwise use a lock. */
7624 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7625 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7626 && !OMP_CLAUSE_REDUCTION_TASK (c))
7628 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7629 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7631 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7632 count = -1;
7633 break;
7635 count++;
7638 if (count == 0)
7639 return;
7641 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7643 tree var, ref, new_var, orig_var;
7644 enum tree_code code;
7645 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7647 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7648 || OMP_CLAUSE_REDUCTION_TASK (c))
7649 continue;
7651 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7652 orig_var = var = OMP_CLAUSE_DECL (c);
7653 if (TREE_CODE (var) == MEM_REF)
7655 var = TREE_OPERAND (var, 0);
7656 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7657 var = TREE_OPERAND (var, 0);
7658 if (TREE_CODE (var) == ADDR_EXPR)
7659 var = TREE_OPERAND (var, 0);
7660 else
7662 /* If this is a pointer- or reference-based array
7663 section, the var could be private in the outer
7664 context, e.g. on an orphaned loop construct. Pretend
7665 this is the private variable's outer reference. */
7666 ccode = OMP_CLAUSE_PRIVATE;
7667 if (TREE_CODE (var) == INDIRECT_REF)
7668 var = TREE_OPERAND (var, 0);
7670 orig_var = var;
7671 if (is_variable_sized (var))
7673 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7674 var = DECL_VALUE_EXPR (var);
7675 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7676 var = TREE_OPERAND (var, 0);
7677 gcc_assert (DECL_P (var));
7680 new_var = lookup_decl (var, ctx);
7681 if (var == OMP_CLAUSE_DECL (c)
7682 && omp_privatize_by_reference (var))
7683 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7684 ref = build_outer_var_ref (var, ctx, ccode);
7685 code = OMP_CLAUSE_REDUCTION_CODE (c);
7687 /* reduction(-:var) sums up the partial results, so it acts
7688 identically to reduction(+:var). */
7689 if (code == MINUS_EXPR)
7690 code = PLUS_EXPR;
7692 bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
7693 if (count == 1)
7695 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7697 addr = save_expr (addr);
7698 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7699 tree new_var2 = new_var;
7700 tree ref2 = ref;
7701 if (is_truth_op)
7703 tree zero = build_zero_cst (TREE_TYPE (new_var));
7704 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7705 boolean_type_node, new_var, zero);
7706 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7707 ref, zero);
7709 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7710 new_var2);
7711 if (is_truth_op)
7712 x = fold_convert (TREE_TYPE (new_var), x);
7713 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7714 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7715 gimplify_and_add (x, stmt_seqp);
7716 return;
7718 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7720 tree d = OMP_CLAUSE_DECL (c);
7721 tree type = TREE_TYPE (d);
7722 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7723 tree i = create_tmp_var (TREE_TYPE (v));
7724 tree ptype = build_pointer_type (TREE_TYPE (type));
7725 tree bias = TREE_OPERAND (d, 1);
7726 d = TREE_OPERAND (d, 0);
7727 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7729 tree b = TREE_OPERAND (d, 1);
7730 b = maybe_lookup_decl (b, ctx);
7731 if (b == NULL)
7733 b = TREE_OPERAND (d, 1);
7734 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7736 if (integer_zerop (bias))
7737 bias = b;
7738 else
7740 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7741 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7742 TREE_TYPE (b), b, bias);
7744 d = TREE_OPERAND (d, 0);
7746 /* For ref build_outer_var_ref already performs this, so
7747 only new_var needs a dereference. */
7748 if (TREE_CODE (d) == INDIRECT_REF)
7750 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7751 gcc_assert (omp_privatize_by_reference (var)
7752 && var == orig_var);
7754 else if (TREE_CODE (d) == ADDR_EXPR)
7756 if (orig_var == var)
7758 new_var = build_fold_addr_expr (new_var);
7759 ref = build_fold_addr_expr (ref);
7762 else
7764 gcc_assert (orig_var == var);
7765 if (omp_privatize_by_reference (var))
7766 ref = build_fold_addr_expr (ref);
7768 if (DECL_P (v))
7770 tree t = maybe_lookup_decl (v, ctx);
7771 if (t)
7772 v = t;
7773 else
7774 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7775 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7777 if (!integer_zerop (bias))
7779 bias = fold_convert_loc (clause_loc, sizetype, bias);
7780 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7781 TREE_TYPE (new_var), new_var,
7782 unshare_expr (bias));
7783 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7784 TREE_TYPE (ref), ref, bias);
7786 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7787 ref = fold_convert_loc (clause_loc, ptype, ref);
7788 tree m = create_tmp_var (ptype);
7789 gimplify_assign (m, new_var, stmt_seqp);
7790 new_var = m;
7791 m = create_tmp_var (ptype);
7792 gimplify_assign (m, ref, stmt_seqp);
7793 ref = m;
7794 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7795 tree body = create_artificial_label (UNKNOWN_LOCATION);
7796 tree end = create_artificial_label (UNKNOWN_LOCATION);
7797 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7798 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7799 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7800 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7802 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7803 tree decl_placeholder
7804 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7805 SET_DECL_VALUE_EXPR (placeholder, out);
7806 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7807 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7808 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7809 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7810 gimple_seq_add_seq (&sub_seq,
7811 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7812 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7813 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7814 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7816 else
7818 tree out2 = out;
7819 tree priv2 = priv;
7820 if (is_truth_op)
7822 tree zero = build_zero_cst (TREE_TYPE (out));
7823 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7824 boolean_type_node, out, zero);
7825 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7826 boolean_type_node, priv, zero);
7828 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7829 if (is_truth_op)
7830 x = fold_convert (TREE_TYPE (out), x);
7831 out = unshare_expr (out);
7832 gimplify_assign (out, x, &sub_seq);
7834 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7835 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7836 gimple_seq_add_stmt (&sub_seq, g);
7837 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7838 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7839 gimple_seq_add_stmt (&sub_seq, g);
7840 g = gimple_build_assign (i, PLUS_EXPR, i,
7841 build_int_cst (TREE_TYPE (i), 1));
7842 gimple_seq_add_stmt (&sub_seq, g);
7843 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7844 gimple_seq_add_stmt (&sub_seq, g);
7845 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7847 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7849 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7851 if (omp_privatize_by_reference (var)
7852 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7853 TREE_TYPE (ref)))
7854 ref = build_fold_addr_expr_loc (clause_loc, ref);
7855 SET_DECL_VALUE_EXPR (placeholder, ref);
7856 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7857 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7858 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7859 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7860 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7862 else
7864 tree new_var2 = new_var;
7865 tree ref2 = ref;
7866 if (is_truth_op)
7868 tree zero = build_zero_cst (TREE_TYPE (new_var));
7869 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7870 boolean_type_node, new_var, zero);
7871 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7872 ref, zero);
7874 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7875 if (is_truth_op)
7876 x = fold_convert (TREE_TYPE (new_var), x);
7877 ref = build_outer_var_ref (var, ctx);
7878 gimplify_assign (ref, x, &sub_seq);
7882 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7883 0);
7884 gimple_seq_add_stmt (stmt_seqp, stmt);
7886 gimple_seq_add_seq (stmt_seqp, sub_seq);
7888 if (clist)
7890 gimple_seq_add_seq (stmt_seqp, *clist);
7891 *clist = NULL;
7894 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7895 0);
7896 gimple_seq_add_stmt (stmt_seqp, stmt);
7900 /* Generate code to implement the COPYPRIVATE clauses. */
7902 static void
7903 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7904 omp_context *ctx)
7906 tree c;
7908 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7910 tree var, new_var, ref, x;
7911 bool by_ref;
7912 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7914 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7915 continue;
7917 var = OMP_CLAUSE_DECL (c);
7918 by_ref = use_pointer_for_field (var, NULL);
7920 ref = build_sender_ref (var, ctx);
7921 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7922 if (by_ref)
7924 x = build_fold_addr_expr_loc (clause_loc, new_var);
7925 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7927 gimplify_assign (ref, x, slist);
7929 ref = build_receiver_ref (var, false, ctx);
7930 if (by_ref)
7932 ref = fold_convert_loc (clause_loc,
7933 build_pointer_type (TREE_TYPE (new_var)),
7934 ref);
7935 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7937 if (omp_privatize_by_reference (var))
7939 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7940 ref = build_simple_mem_ref_loc (clause_loc, ref);
7941 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7943 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7944 gimplify_and_add (x, rlist);
7949 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7950 and REDUCTION from the sender (aka parent) side. */
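/* A sketch of the effect, assuming the usual ".omp_data_o" sender
   struct: "firstprivate (x)" stores the value (or its address, when
   passed by reference) before the region starts,

     .omp_data_o.x = x;

   and "lastprivate (x)" copies the result back afterwards:

     x = .omp_data_o.x;  */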
7952 static void
7953 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7954 omp_context *ctx)
7956 tree c, t;
7957 int ignored_looptemp = 0;
7958 bool is_taskloop = false;
7960 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7961 by GOMP_taskloop. */
7962 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7964 ignored_looptemp = 2;
7965 is_taskloop = true;
7968 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7970 tree val, ref, x, var;
7971 bool by_ref, do_in = false, do_out = false;
7972 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7974 switch (OMP_CLAUSE_CODE (c))
7976 case OMP_CLAUSE_PRIVATE:
7977 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7978 break;
7979 continue;
7980 case OMP_CLAUSE_FIRSTPRIVATE:
7981 case OMP_CLAUSE_COPYIN:
7982 case OMP_CLAUSE_LASTPRIVATE:
7983 case OMP_CLAUSE_IN_REDUCTION:
7984 case OMP_CLAUSE__REDUCTEMP_:
7985 break;
7986 case OMP_CLAUSE_REDUCTION:
7987 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7988 continue;
7989 break;
7990 case OMP_CLAUSE_SHARED:
7991 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7992 break;
7993 continue;
7994 case OMP_CLAUSE__LOOPTEMP_:
7995 if (ignored_looptemp)
7997 ignored_looptemp--;
7998 continue;
8000 break;
8001 default:
8002 continue;
8005 val = OMP_CLAUSE_DECL (c);
8006 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8007 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
8008 && TREE_CODE (val) == MEM_REF)
8010 val = TREE_OPERAND (val, 0);
8011 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
8012 val = TREE_OPERAND (val, 0);
8013 if (TREE_CODE (val) == INDIRECT_REF
8014 || TREE_CODE (val) == ADDR_EXPR)
8015 val = TREE_OPERAND (val, 0);
8016 if (is_variable_sized (val))
8017 continue;
8020 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
8021 outer taskloop region. */
8022 omp_context *ctx_for_o = ctx;
8023 if (is_taskloop
8024 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8025 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8026 ctx_for_o = ctx->outer;
8028 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
8030 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
8031 && is_global_var (var)
8032 && (val == OMP_CLAUSE_DECL (c)
8033 || !is_task_ctx (ctx)
8034 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
8035 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
8036 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
8037 != POINTER_TYPE)))))
8038 continue;
8040 t = omp_member_access_dummy_var (var);
8041 if (t)
8043 var = DECL_VALUE_EXPR (var);
8044 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
8045 if (o != t)
8046 var = unshare_and_remap (var, t, o);
8047 else
8048 var = unshare_expr (var);
8051 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
8053 /* Handle taskloop firstprivate/lastprivate, where the
8054 lastprivate on GIMPLE_OMP_TASK is represented as
8055 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8056 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
8057 x = omp_build_component_ref (ctx->sender_decl, f);
8058 if (use_pointer_for_field (val, ctx))
8059 var = build_fold_addr_expr (var);
8060 gimplify_assign (x, var, ilist);
8061 DECL_ABSTRACT_ORIGIN (f) = NULL;
8062 continue;
8065 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8066 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
8067 || val == OMP_CLAUSE_DECL (c))
8068 && is_variable_sized (val))
8069 continue;
8070 by_ref = use_pointer_for_field (val, NULL);
8072 switch (OMP_CLAUSE_CODE (c))
8074 case OMP_CLAUSE_FIRSTPRIVATE:
8075 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
8076 && !by_ref
8077 && is_task_ctx (ctx))
8078 suppress_warning (var);
8079 do_in = true;
8080 break;
8082 case OMP_CLAUSE_PRIVATE:
8083 case OMP_CLAUSE_COPYIN:
8084 case OMP_CLAUSE__LOOPTEMP_:
8085 case OMP_CLAUSE__REDUCTEMP_:
8086 do_in = true;
8087 break;
8089 case OMP_CLAUSE_LASTPRIVATE:
8090 if (by_ref || omp_privatize_by_reference (val))
8092 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8093 continue;
8094 do_in = true;
8096 else
8098 do_out = true;
8099 if (lang_hooks.decls.omp_private_outer_ref (val))
8100 do_in = true;
8102 break;
8104 case OMP_CLAUSE_REDUCTION:
8105 case OMP_CLAUSE_IN_REDUCTION:
8106 do_in = true;
8107 if (val == OMP_CLAUSE_DECL (c))
8109 if (is_task_ctx (ctx))
8110 by_ref = use_pointer_for_field (val, ctx);
8111 else
8112 do_out = !(by_ref || omp_privatize_by_reference (val));
8114 else
8115 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
8116 break;
8118 default:
8119 gcc_unreachable ();
8122 if (do_in)
8124 ref = build_sender_ref (val, ctx);
8125 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
8126 gimplify_assign (ref, x, ilist);
8127 if (is_task_ctx (ctx))
8128 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
8131 if (do_out)
8133 ref = build_sender_ref (val, ctx);
8134 gimplify_assign (var, ref, olist);
8139 /* Generate code to implement SHARED from the sender (aka parent)
8140 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8141 list things that got automatically shared. */
8143 static void
8144 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
8146 tree var, ovar, nvar, t, f, x, record_type;
8148 if (ctx->record_type == NULL)
8149 return;
8151 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
8152 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8154 ovar = DECL_ABSTRACT_ORIGIN (f);
8155 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
8156 continue;
8158 nvar = maybe_lookup_decl (ovar, ctx);
8159 if (!nvar
8160 || !DECL_HAS_VALUE_EXPR_P (nvar)
8161 || (ctx->allocate_map
8162 && ctx->allocate_map->get (ovar)))
8163 continue;
8165 /* If CTX is a nested parallel directive, find the immediately
8166 enclosing parallel or workshare construct that contains a
8167 mapping for OVAR. */
8168 var = lookup_decl_in_outer_ctx (ovar, ctx);
8170 t = omp_member_access_dummy_var (var);
8171 if (t)
8173 var = DECL_VALUE_EXPR (var);
8174 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
8175 if (o != t)
8176 var = unshare_and_remap (var, t, o);
8177 else
8178 var = unshare_expr (var);
8181 if (use_pointer_for_field (ovar, ctx))
8183 x = build_sender_ref (ovar, ctx);
8184 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
8185 && TREE_TYPE (f) == TREE_TYPE (ovar))
8187 gcc_assert (is_parallel_ctx (ctx)
8188 && DECL_ARTIFICIAL (ovar));
8189 /* _condtemp_ clause. */
8190 var = build_constructor (TREE_TYPE (x), NULL);
8192 else
8193 var = build_fold_addr_expr (var);
8194 gimplify_assign (x, var, ilist);
8196 else
8198 x = build_sender_ref (ovar, ctx);
8199 gimplify_assign (x, var, ilist);
8201 if (!TREE_READONLY (var)
8202 /* We don't need to receive a new reference to a result
8203 or parm decl. In fact we may not store to it as we will
8204 invalidate any pending RSO and generate wrong gimple
8205 during inlining. */
8206 && !((TREE_CODE (var) == RESULT_DECL
8207 || TREE_CODE (var) == PARM_DECL)
8208 && DECL_BY_REFERENCE (var)))
8210 x = build_sender_ref (ovar, ctx);
8211 gimplify_assign (var, x, olist);
8217 /* Emit an OpenACC head marker call, encapsulating the partitioning and
8218 other information that must be processed by the target compiler.
8219 Return the maximum number of dimensions the associated loop might
8220 be partitioned over. */
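/* Schematically (a sketch), for "#pragma acc loop gang vector" this
   emits a single internal call

     .data_dep = UNIQUE (OACC_HEAD_MARK, .data_dep, 2, tag);

   where 2 is the number of partitioned levels returned to the caller
   and TAG encodes OLF_DIM_GANG | OLF_DIM_VECTOR plus flags such as
   OLF_INDEPENDENT.  */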
8222 static unsigned
8223 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
8224 gimple_seq *seq, omp_context *ctx)
8226 unsigned levels = 0;
8227 unsigned tag = 0;
8228 tree gang_static = NULL_TREE;
8229 auto_vec<tree, 5> args;
8231 args.quick_push (build_int_cst
8232 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
8233 args.quick_push (ddvar);
8234 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8236 switch (OMP_CLAUSE_CODE (c))
8238 case OMP_CLAUSE_GANG:
8239 tag |= OLF_DIM_GANG;
8240 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
8241 /* static:* is represented by -1, and we can ignore it, as
8242 scheduling is always static. */
8243 if (gang_static && integer_minus_onep (gang_static))
8244 gang_static = NULL_TREE;
8245 levels++;
8246 break;
8248 case OMP_CLAUSE_WORKER:
8249 tag |= OLF_DIM_WORKER;
8250 levels++;
8251 break;
8253 case OMP_CLAUSE_VECTOR:
8254 tag |= OLF_DIM_VECTOR;
8255 levels++;
8256 break;
8258 case OMP_CLAUSE_SEQ:
8259 tag |= OLF_SEQ;
8260 break;
8262 case OMP_CLAUSE_AUTO:
8263 tag |= OLF_AUTO;
8264 break;
8266 case OMP_CLAUSE_INDEPENDENT:
8267 tag |= OLF_INDEPENDENT;
8268 break;
8270 case OMP_CLAUSE_TILE:
8271 tag |= OLF_TILE;
8272 break;
8274 case OMP_CLAUSE_REDUCTION:
8275 tag |= OLF_REDUCTION;
8276 break;
8278 default:
8279 continue;
8283 if (gang_static)
8285 if (DECL_P (gang_static))
8286 gang_static = build_outer_var_ref (gang_static, ctx);
8287 tag |= OLF_GANG_STATIC;
8290 omp_context *tgt = enclosing_target_ctx (ctx);
8291 if (!tgt || is_oacc_parallel_or_serial (tgt))
8292 ;
8293 else if (is_oacc_kernels (tgt))
8294 /* This loop handling is not used inside OpenACC 'kernels' regions. */
8295 gcc_unreachable ();
8296 else if (is_oacc_kernels_decomposed_part (tgt))
8297 ;
8298 else
8299 gcc_unreachable ();
8301 /* In a parallel region, loops are implicitly INDEPENDENT. */
8302 if (!tgt || is_oacc_parallel_or_serial (tgt))
8303 tag |= OLF_INDEPENDENT;
8305 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8306 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8307 if (tgt && is_oacc_kernels_decomposed_part (tgt))
8309 gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
8310 gcc_assert (!(tag & OLF_AUTO));
8313 if (tag & OLF_TILE)
8314 /* Tiling could use all 3 levels. */
8315 levels = 3;
8316 else
8318 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8319 Ensure at least one level, or 2 for possible auto
8320 partitioning. */
8321 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
8322 << OLF_DIM_BASE) | OLF_SEQ));
8324 if (levels < 1u + maybe_auto)
8325 levels = 1u + maybe_auto;
8328 args.quick_push (build_int_cst (integer_type_node, levels));
8329 args.quick_push (build_int_cst (integer_type_node, tag));
8330 if (gang_static)
8331 args.quick_push (gang_static);
8333 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
8334 gimple_set_location (call, loc);
8335 gimple_set_lhs (call, ddvar);
8336 gimple_seq_add_stmt (seq, call);
8338 return levels;
8341 /* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
8342 partitioning level of the enclosed region. */
8344 static void
8345 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8346 tree tofollow, gimple_seq *seq)
8348 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8349 : IFN_UNIQUE_OACC_TAIL_MARK);
8350 tree marker = build_int_cst (integer_type_node, marker_kind);
8351 int nargs = 2 + (tofollow != NULL_TREE);
8352 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8353 marker, ddvar, tofollow);
8354 gimple_set_location (call, loc);
8355 gimple_set_lhs (call, ddvar);
8356 gimple_seq_add_stmt (seq, call);
8359 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8360 the loop clauses, from which we extract reductions. Initialize
8361 HEAD and TAIL. */
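/* Schematically (a sketch), for a loop partitioned over two levels:

     HEAD: head marker; fork seq for level 1; fork seq for level 2
     ... loop ...
     TAIL: join seq for level 2; join seq for level 1; tail marker

   i.e. each level's fork sequence is appended to HEAD while its join
   sequence is prepended to TAIL, so the joins unwind in reverse.  */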
8363 static void
8364 lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
8365 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
8367 bool inner = false;
8368 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
8369 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
8371 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
8373 if (private_marker)
8375 gimple_set_location (private_marker, loc);
8376 gimple_call_set_lhs (private_marker, ddvar);
8377 gimple_call_set_arg (private_marker, 1, ddvar);
8380 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
8381 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
8383 gcc_assert (count);
8384 for (unsigned done = 1; count; count--, done++)
8386 gimple_seq fork_seq = NULL;
8387 gimple_seq join_seq = NULL;
8389 tree place = build_int_cst (integer_type_node, -1);
8390 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
8391 fork_kind, ddvar, place);
8392 gimple_set_location (fork, loc);
8393 gimple_set_lhs (fork, ddvar);
8395 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
8396 join_kind, ddvar, place);
8397 gimple_set_location (join, loc);
8398 gimple_set_lhs (join, ddvar);
8400 /* Mark the beginning of this level sequence. */
8401 if (inner)
8402 lower_oacc_loop_marker (loc, ddvar, true,
8403 build_int_cst (integer_type_node, count),
8404 &fork_seq);
8405 lower_oacc_loop_marker (loc, ddvar, false,
8406 build_int_cst (integer_type_node, done),
8407 &join_seq);
8409 lower_oacc_reductions (loc, clauses, place, inner,
8410 fork, (count == 1) ? private_marker : NULL,
8411 join, &fork_seq, &join_seq, ctx);
8413 /* Append this level to head. */
8414 gimple_seq_add_seq (head, fork_seq);
8415 /* Prepend it to tail. */
8416 gimple_seq_add_seq (&join_seq, *tail);
8417 *tail = join_seq;
8419 inner = true;
8422 /* Mark the end of the sequence. */
8423 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
8424 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
8427 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8428 catch handler and return it. This prevents programs from violating the
8429 structured block semantics with throws. */
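/* Schematically (a sketch), BODY becomes

     try { BODY } catch { MUST_NOT_THROW -> terminate/trap }

   where the failure callback is the language's eh_protect_cleanup_actions
   hook when available, and __builtin_trap otherwise.  */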
8431 static gimple_seq
8432 maybe_catch_exception (gimple_seq body)
8434 gimple *g;
8435 tree decl;
8437 if (!flag_exceptions)
8438 return body;
8440 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8441 decl = lang_hooks.eh_protect_cleanup_actions ();
8442 else
8443 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8445 g = gimple_build_eh_must_not_throw (decl);
8446 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8447 GIMPLE_TRY_CATCH);
8449 return gimple_seq_alloc_with_stmt (g);
8453 /* Routines to lower OMP directives into OMP-GIMPLE. */
8455 /* If CTX is a worksharing context inside of a cancellable parallel
8456 region and it isn't nowait, add an lhs to its GIMPLE_OMP_RETURN
8457 and a conditional branch to the parallel's cancel_label to handle
8458 cancellation in the implicit barrier. */
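/* Schematically (a sketch): the implicit barrier's GIMPLE_OMP_RETURN
   gets an artificial boolean LHS, followed by

     if (lhs != false) goto <parallel's cancel_label>;
     <fallthru_label>:

   so a barrier woken by cancellation branches to the handler.  */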
8460 static void
8461 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8462 gimple_seq *body)
8464 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8465 if (gimple_omp_return_nowait_p (omp_return))
8466 return;
8467 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8468 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8469 && outer->cancellable)
8471 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8472 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8473 tree lhs = create_tmp_var (c_bool_type);
8474 gimple_omp_return_set_lhs (omp_return, lhs);
8475 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8476 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8477 fold_convert (c_bool_type,
8478 boolean_false_node),
8479 outer->cancel_label, fallthru_label);
8480 gimple_seq_add_stmt (body, g);
8481 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8483 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
8484 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
8485 return;
8488 /* Find the first task_reduction or reduction clause or return NULL
8489 if there are none. */
8491 static inline tree
8492 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8493 enum omp_clause_code ccode)
8495 while (1)
8497 clauses = omp_find_clause (clauses, ccode);
8498 if (clauses == NULL_TREE)
8499 return NULL_TREE;
8500 if (ccode != OMP_CLAUSE_REDUCTION
8501 || code == OMP_TASKLOOP
8502 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8503 return clauses;
8504 clauses = OMP_CLAUSE_CHAIN (clauses);
8508 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8509 gimple_seq *, gimple_seq *);
8511 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8512 CTX is the enclosing OMP context for the current statement. */
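/* The lowered result has roughly this shape (a sketch):

     <ilist: privatization and reduction setup>
     GIMPLE_OMP_SECTIONS <control = .section>
     GIMPLE_OMP_SECTIONS_SWITCH
     <bind holding each GIMPLE_OMP_SECTION body, the last one marked>
     GIMPLE_OMP_CONTINUE (.section, .section)
     <olist: reduction merges>
     <dlist: destructors>
     GIMPLE_OMP_RETURN [nowait]  */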
8514 static void
8515 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8517 tree block, control;
8518 gimple_stmt_iterator tgsi;
8519 gomp_sections *stmt;
8520 gimple *t;
8521 gbind *new_stmt, *bind;
8522 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8524 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
8526 push_gimplify_context ();
8528 dlist = NULL;
8529 ilist = NULL;
8531 tree rclauses
8532 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
8533 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
8534 tree rtmp = NULL_TREE;
8535 if (rclauses)
8537 tree type = build_pointer_type (pointer_sized_int_node);
8538 tree temp = create_tmp_var (type);
8539 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8540 OMP_CLAUSE_DECL (c) = temp;
8541 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
8542 gimple_omp_sections_set_clauses (stmt, c);
8543 lower_omp_task_reductions (ctx, OMP_SECTIONS,
8544 gimple_omp_sections_clauses (stmt),
8545 &ilist, &tred_dlist);
8546 rclauses = c;
8547 rtmp = make_ssa_name (type);
8548 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
8551 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
8552 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
8554 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
8555 &ilist, &dlist, ctx, NULL);
8557 control = create_tmp_var (unsigned_type_node, ".section");
8558 gimple_omp_sections_set_control (stmt, control);
8560 new_body = gimple_omp_body (stmt);
8561 gimple_omp_set_body (stmt, NULL);
8562 tgsi = gsi_start (new_body);
8563 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
8565 omp_context *sctx;
8566 gimple *sec_start;
8568 sec_start = gsi_stmt (tgsi);
8569 sctx = maybe_lookup_ctx (sec_start);
8570 gcc_assert (sctx);
8572 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8573 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8574 GSI_CONTINUE_LINKING);
8575 gimple_omp_set_body (sec_start, NULL);
8577 if (gsi_one_before_end_p (tgsi))
8579 gimple_seq l = NULL;
8580 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8581 &ilist, &l, &clist, ctx);
8582 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8583 gimple_omp_section_set_last (sec_start);
8586 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8587 GSI_CONTINUE_LINKING);
8590 block = make_node (BLOCK);
8591 bind = gimple_build_bind (NULL, new_body, block);
8593 olist = NULL;
8594 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8595 &clist, ctx);
8596 if (clist)
8598 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8599 gcall *g = gimple_build_call (fndecl, 0);
8600 gimple_seq_add_stmt (&olist, g);
8601 gimple_seq_add_seq (&olist, clist);
8602 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8603 g = gimple_build_call (fndecl, 0);
8604 gimple_seq_add_stmt (&olist, g);
8607 block = make_node (BLOCK);
8608 new_stmt = gimple_build_bind (NULL, NULL, block);
8609 gsi_replace (gsi_p, new_stmt, true);
8611 pop_gimplify_context (new_stmt);
8612 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8613 BLOCK_VARS (block) = gimple_bind_vars (bind);
8614 if (BLOCK_VARS (block))
8615 TREE_USED (block) = 1;
8617 new_body = NULL;
8618 gimple_seq_add_seq (&new_body, ilist);
8619 gimple_seq_add_stmt (&new_body, stmt);
8620 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8621 gimple_seq_add_stmt (&new_body, bind);
8623 t = gimple_build_omp_continue (control, control);
8624 gimple_seq_add_stmt (&new_body, t);
8626 gimple_seq_add_seq (&new_body, olist);
8627 if (ctx->cancellable)
8628 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8629 gimple_seq_add_seq (&new_body, dlist);
8631 new_body = maybe_catch_exception (new_body);
8633 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8634 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8635 t = gimple_build_omp_return (nowait);
8636 gimple_seq_add_stmt (&new_body, t);
8637 gimple_seq_add_seq (&new_body, tred_dlist);
8638 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
8640 if (rclauses)
8641 OMP_CLAUSE_DECL (rclauses) = rtmp;
8643 gimple_bind_set_body (new_stmt, new_body);
8647 /* A subroutine of lower_omp_single. Expand the simple form of
8648 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8650 if (GOMP_single_start ())
8651 BODY;
8652 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8654 FIXME. It may be better to delay expanding the logic of this until
8655 pass_expand_omp. The expanded logic may make the job more difficult
8656 for a synchronization analysis pass. */
8658 static void
8659 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8661 location_t loc = gimple_location (single_stmt);
8662 tree tlabel = create_artificial_label (loc);
8663 tree flabel = create_artificial_label (loc);
8664 gimple *call, *cond;
8665 tree lhs, decl;
8667 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8668 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8669 call = gimple_build_call (decl, 0);
8670 gimple_call_set_lhs (call, lhs);
8671 gimple_seq_add_stmt (pre_p, call);
8673 cond = gimple_build_cond (EQ_EXPR, lhs,
8674 fold_convert_loc (loc, TREE_TYPE (lhs),
8675 boolean_true_node),
8676 tlabel, flabel);
8677 gimple_seq_add_stmt (pre_p, cond);
8678 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8679 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8680 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8684 /* A subroutine of lower_omp_single. Expand the simple form of
8685 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8687 #pragma omp single copyprivate (a, b, c)
8689 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8692 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8694 BODY;
8695 copyout.a = a;
8696 copyout.b = b;
8697 copyout.c = c;
8698 GOMP_single_copy_end (&copyout);
8700 else
8702 a = copyout_p->a;
8703 b = copyout_p->b;
8704 c = copyout_p->c;
8706 GOMP_barrier ();
8709 FIXME. It may be better to delay expanding the logic of this until
8710 pass_expand_omp. The expanded logic may make the job more difficult
8711 for a synchronization analysis pass. */
8713 static void
8714 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8715 omp_context *ctx)
8717 tree ptr_type, t, l0, l1, l2, bfn_decl;
8718 gimple_seq copyin_seq;
8719 location_t loc = gimple_location (single_stmt);
8721 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8723 ptr_type = build_pointer_type (ctx->record_type);
8724 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8726 l0 = create_artificial_label (loc);
8727 l1 = create_artificial_label (loc);
8728 l2 = create_artificial_label (loc);
8730 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8731 t = build_call_expr_loc (loc, bfn_decl, 0);
8732 t = fold_convert_loc (loc, ptr_type, t);
8733 gimplify_assign (ctx->receiver_decl, t, pre_p);
8735 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8736 build_int_cst (ptr_type, 0));
8737 t = build3 (COND_EXPR, void_type_node, t,
8738 build_and_jump (&l0), build_and_jump (&l1));
8739 gimplify_and_add (t, pre_p);
8741 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8743 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8745 copyin_seq = NULL;
8746 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8747 &copyin_seq, ctx);
8749 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8750 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8751 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8752 gimplify_and_add (t, pre_p);
8754 t = build_and_jump (&l2);
8755 gimplify_and_add (t, pre_p);
8757 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8759 gimple_seq_add_seq (pre_p, copyin_seq);
8761 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8765 /* Expand code for an OpenMP single directive. */
8767 static void
8768 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8770 tree block;
8771 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8772 gbind *bind;
8773 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8775 push_gimplify_context ();
8777 block = make_node (BLOCK);
8778 bind = gimple_build_bind (NULL, NULL, block);
8779 gsi_replace (gsi_p, bind, true);
8780 bind_body = NULL;
8781 dlist = NULL;
8782 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8783 &bind_body, &dlist, ctx, NULL);
8784 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8786 gimple_seq_add_stmt (&bind_body, single_stmt);
8788 if (ctx->record_type)
8789 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8790 else
8791 lower_omp_single_simple (single_stmt, &bind_body);
8793 gimple_omp_set_body (single_stmt, NULL);
8795 gimple_seq_add_seq (&bind_body, dlist);
8797 bind_body = maybe_catch_exception (bind_body);
8799 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8800 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8801 gimple *g = gimple_build_omp_return (nowait);
8802 gimple_seq_add_stmt (&bind_body_tail, g);
8803 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8804 if (ctx->record_type)
8806 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8807 tree clobber = build_clobber (ctx->record_type);
8808 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8809 clobber), GSI_SAME_STMT);
8811 gimple_seq_add_seq (&bind_body, bind_body_tail);
8812 gimple_bind_set_body (bind, bind_body);
8814 pop_gimplify_context (bind);
8816 gimple_bind_append_vars (bind, ctx->block_vars);
8817 BLOCK_VARS (block) = ctx->block_vars;
8818 if (BLOCK_VARS (block))
8819 TREE_USED (block) = 1;
8823 /* Lower code for an OMP scope directive. */
8825 static void
8826 lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8828 tree block;
8829 gimple *scope_stmt = gsi_stmt (*gsi_p);
8830 gbind *bind;
8831 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8832 gimple_seq tred_dlist = NULL;
8834 push_gimplify_context ();
8836 block = make_node (BLOCK);
8837 bind = gimple_build_bind (NULL, NULL, block);
8838 gsi_replace (gsi_p, bind, true);
8839 bind_body = NULL;
8840 dlist = NULL;
8842 tree rclauses
8843 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
8844 OMP_SCOPE, OMP_CLAUSE_REDUCTION);
8845 if (rclauses)
8847 tree type = build_pointer_type (pointer_sized_int_node);
8848 tree temp = create_tmp_var (type);
8849 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8850 OMP_CLAUSE_DECL (c) = temp;
8851 OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
8852 gimple_omp_scope_set_clauses (scope_stmt, c);
8853 lower_omp_task_reductions (ctx, OMP_SCOPE,
8854 gimple_omp_scope_clauses (scope_stmt),
8855 &bind_body, &tred_dlist);
8856 rclauses = c;
8857 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
8858 gimple *stmt = gimple_build_call (fndecl, 1, temp);
8859 gimple_seq_add_stmt (&bind_body, stmt);
8862 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
8863 &bind_body, &dlist, ctx, NULL);
8864 lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);
8866 gimple_seq_add_stmt (&bind_body, scope_stmt);
8868 gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));
8870 gimple_omp_set_body (scope_stmt, NULL);
8872 gimple_seq clist = NULL;
8873 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
8874 &bind_body, &clist, ctx);
8875 if (clist)
8877 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8878 gcall *g = gimple_build_call (fndecl, 0);
8879 gimple_seq_add_stmt (&bind_body, g);
8880 gimple_seq_add_seq (&bind_body, clist);
8881 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8882 g = gimple_build_call (fndecl, 0);
8883 gimple_seq_add_stmt (&bind_body, g);
8886 gimple_seq_add_seq (&bind_body, dlist);
8888 bind_body = maybe_catch_exception (bind_body);
8890 bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
8891 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8892 gimple *g = gimple_build_omp_return (nowait);
8893 gimple_seq_add_stmt (&bind_body_tail, g);
8894 gimple_seq_add_seq (&bind_body_tail, tred_dlist);
8895 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8896 if (ctx->record_type)
8898 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8899 tree clobber = build_clobber (ctx->record_type);
8900 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8901 clobber), GSI_SAME_STMT);
8903 gimple_seq_add_seq (&bind_body, bind_body_tail);
8905 gimple_bind_set_body (bind, bind_body);
8907 pop_gimplify_context (bind);
8909 gimple_bind_append_vars (bind, ctx->block_vars);
8910 BLOCK_VARS (block) = ctx->block_vars;
8911 if (BLOCK_VARS (block))
8912 TREE_USED (block) = 1;
8914 /* Expand code for an OpenMP master or masked directive. */
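/* Both forms lower to a thread-number check around the body;
   schematically (a sketch), "#pragma omp masked filter (f)" becomes

     if (omp_get_thread_num () != f) goto lab;
     BODY;
     lab:

   with plain 'master' being the f == 0 special case.  */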
8916 static void
8917 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8919 tree block, lab = NULL, x, bfn_decl;
8920 gimple *stmt = gsi_stmt (*gsi_p);
8921 gbind *bind;
8922 location_t loc = gimple_location (stmt);
8923 gimple_seq tseq;
8924 tree filter = integer_zero_node;
8926 push_gimplify_context ();
8928 if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
8930 filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
8931 OMP_CLAUSE_FILTER);
8932 if (filter)
8933 filter = fold_convert (integer_type_node,
8934 OMP_CLAUSE_FILTER_EXPR (filter));
8935 else
8936 filter = integer_zero_node;
8938 block = make_node (BLOCK);
8939 bind = gimple_build_bind (NULL, NULL, block);
8940 gsi_replace (gsi_p, bind, true);
8941 gimple_bind_add_stmt (bind, stmt);
8943 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8944 x = build_call_expr_loc (loc, bfn_decl, 0);
8945 x = build2 (EQ_EXPR, boolean_type_node, x, filter);
8946 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8947 tseq = NULL;
8948 gimplify_and_add (x, &tseq);
8949 gimple_bind_add_seq (bind, tseq);
8951 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8952 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8953 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8954 gimple_omp_set_body (stmt, NULL);
8956 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8958 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8960 pop_gimplify_context (bind);
8962 gimple_bind_append_vars (bind, ctx->block_vars);
8963 BLOCK_VARS (block) = ctx->block_vars;
8966 /* Helper function for lower_omp_task_reductions. For a specific PASS,
8967 find the next clause that should be processed, or return false
8968 if all have been processed already. */
8970 static inline bool
8971 omp_task_reduction_iterate (int pass, enum tree_code code,
8972 enum omp_clause_code ccode, tree *c, tree *decl,
8973 tree *type, tree *next)
8975 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8977 if (ccode == OMP_CLAUSE_REDUCTION
8978 && code != OMP_TASKLOOP
8979 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8980 continue;
8981 *decl = OMP_CLAUSE_DECL (*c);
8982 *type = TREE_TYPE (*decl);
8983 if (TREE_CODE (*decl) == MEM_REF)
8985 if (pass != 1)
8986 continue;
8988 else
8990 if (omp_privatize_by_reference (*decl))
8991 *type = TREE_TYPE (*type);
8992 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8993 continue;
8995 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8996 return true;
8998 *decl = NULL_TREE;
8999 *type = NULL_TREE;
9000 *next = NULL_TREE;
9001 return false;
9004 /* Lower task_reduction and reduction clauses (the latter, unless CODE
9005 is OMP_TASKLOOP, only those with the task modifier). Register the
9006 mapping of those in the START sequence; reduce and unregister them in the END sequence. */
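/* The registration array built below (full format documented in
   libgomp/task.c) starts with a header, roughly:

     [0] reduction count          [1] cacheline-rounded chunk size
     [2] alignment                [3] -1        [4] 0

   followed by three slots per reduction starting at index 7 + 3*i, the
   first holding the address of the original variable and the second the
   byte offset of its private copy within a thread's chunk (a sketch;
   only the stores visible below are described).  */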
9008 static void
9009 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
9010 gimple_seq *start, gimple_seq *end)
9012 enum omp_clause_code ccode
9013 = (code == OMP_TASKGROUP
9014 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
9015 tree cancellable = NULL_TREE;
9016 clauses = omp_task_reductions_find_first (clauses, code, ccode);
9017 if (clauses == NULL_TREE)
9018 return;
9019 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9021 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
9022 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
9023 && outer->cancellable)
9025 cancellable = error_mark_node;
9026 break;
9028 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
9029 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
9030 break;
9032 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
9033 tree *last = &TYPE_FIELDS (record_type);
9034 unsigned cnt = 0;
9035 if (cancellable)
9037 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9038 ptr_type_node);
9039 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9040 integer_type_node);
9041 *last = field;
9042 DECL_CHAIN (field) = ifield;
9043 last = &DECL_CHAIN (ifield);
9044 DECL_CONTEXT (field) = record_type;
9045 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9046 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9047 DECL_CONTEXT (ifield) = record_type;
9048 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
9049 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
9051 for (int pass = 0; pass < 2; pass++)
9053 tree decl, type, next;
9054 for (tree c = clauses;
9055 omp_task_reduction_iterate (pass, code, ccode,
9056 &c, &decl, &type, &next); c = next)
9058 ++cnt;
9059 tree new_type = type;
9060 if (ctx->outer)
9061 new_type = remap_type (type, &ctx->outer->cb);
9062 tree field
9063 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
9064 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
9065 new_type);
9066 if (DECL_P (decl) && type == TREE_TYPE (decl))
9068 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
9069 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
9070 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
9072 else
9073 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
9074 DECL_CONTEXT (field) = record_type;
9075 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9076 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9077 *last = field;
9078 last = &DECL_CHAIN (field);
9079 tree bfield
9080 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
9081 boolean_type_node);
9082 DECL_CONTEXT (bfield) = record_type;
9083 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
9084 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
9085 *last = bfield;
9086 last = &DECL_CHAIN (bfield);
9089 *last = NULL_TREE;
9090 layout_type (record_type);
9092 /* Build up an array used to register all the reductions with the runtime
9093 and to deregister them at the end. Format documented in libgomp/task.c. */
9094 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
9095 tree avar = create_tmp_var_raw (atype);
9096 gimple_add_tmp_var (avar);
9097 TREE_ADDRESSABLE (avar) = 1;
9098 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
9099 NULL_TREE, NULL_TREE);
9100 tree t = build_int_cst (pointer_sized_int_node, cnt);
9101 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9102 gimple_seq seq = NULL;
9103 tree sz = fold_convert (pointer_sized_int_node,
9104 TYPE_SIZE_UNIT (record_type));
9105 int cachesz = 64;
9106 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
9107 build_int_cst (pointer_sized_int_node, cachesz - 1));
9108 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
9109 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
9110 ctx->task_reductions.create (1 + cnt);
9111 ctx->task_reduction_map = new hash_map<tree, unsigned>;
9112 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
9113 ? sz : NULL_TREE);
9114 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
9115 gimple_seq_add_seq (start, seq);
9116 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
9117 NULL_TREE, NULL_TREE);
9118 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
9119 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9120 NULL_TREE, NULL_TREE);
9121 t = build_int_cst (pointer_sized_int_node,
9122 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
9123 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9124 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
9125 NULL_TREE, NULL_TREE);
9126 t = build_int_cst (pointer_sized_int_node, -1);
9127 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9128 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
9129 NULL_TREE, NULL_TREE);
9130 t = build_int_cst (pointer_sized_int_node, 0);
9131 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9133 /* In END, build a loop that iterates from 0 to < omp_get_num_threads ()
9134 and for each task reduction checks a bool right after the private variable
9135 within that thread's chunk; if the bool is clear, it hasn't been
9136 initialized and thus isn't going to be reduced nor destructed, otherwise
9137 reduce and destruct it. */
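/* I.e. (a sketch):

     for (idx = 0; idx < num_thr_sz; idx++)
       for each reduction R:
         if (<bool following R's private copy in chunk [idx]>)
           { <merge chunk [idx]'s R into the original>; <destruct it>; }  */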
9138 tree idx = create_tmp_var (size_type_node);
9139 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
9140 tree num_thr_sz = create_tmp_var (size_type_node);
9141 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9142 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9143 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
9144 gimple *g;
9145 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9147 /* For worksharing constructs or scope, only perform it in the master
9148 thread, with the exception of cancelled implicit barriers; then only
9149 handle the current thread. */
9150 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9151 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9152 tree thr_num = create_tmp_var (integer_type_node);
9153 g = gimple_build_call (t, 0);
9154 gimple_call_set_lhs (g, thr_num);
9155 gimple_seq_add_stmt (end, g);
9156 if (cancellable)
9158 tree c;
9159 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9160 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9161 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9162 if (code == OMP_FOR)
9163 c = gimple_omp_for_clauses (ctx->stmt);
9164 else if (code == OMP_SECTIONS)
9165 c = gimple_omp_sections_clauses (ctx->stmt);
9166 else /* if (code == OMP_SCOPE) */
9167 c = gimple_omp_scope_clauses (ctx->stmt);
9168 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
9169 cancellable = c;
9170 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
9171 lab5, lab6);
9172 gimple_seq_add_stmt (end, g);
9173 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9174 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
9175 gimple_seq_add_stmt (end, g);
9176 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
9177 build_one_cst (TREE_TYPE (idx)));
9178 gimple_seq_add_stmt (end, g);
9179 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
9180 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9182 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
9183 gimple_seq_add_stmt (end, g);
9184 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9186 if (code != OMP_PARALLEL)
9188 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9189 tree num_thr = create_tmp_var (integer_type_node);
9190 g = gimple_build_call (t, 0);
9191 gimple_call_set_lhs (g, num_thr);
9192 gimple_seq_add_stmt (end, g);
9193 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
9194 gimple_seq_add_stmt (end, g);
9195 if (cancellable)
9196 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9198 else
9200 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
9201 OMP_CLAUSE__REDUCTEMP_);
9202 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
9203 t = fold_convert (size_type_node, t);
9204 gimplify_assign (num_thr_sz, t, end);
9206 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9207 NULL_TREE, NULL_TREE);
9208 tree data = create_tmp_var (pointer_sized_int_node);
9209 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
9210 if (code == OMP_TASKLOOP)
9212 lab7 = create_artificial_label (UNKNOWN_LOCATION);
9213 g = gimple_build_cond (NE_EXPR, data,
9214 build_zero_cst (pointer_sized_int_node),
9215 lab1, lab7);
9216 gimple_seq_add_stmt (end, g);
9218 gimple_seq_add_stmt (end, gimple_build_label (lab1));
9219 tree ptr;
9220 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
9221 ptr = create_tmp_var (build_pointer_type (record_type));
9222 else
9223 ptr = create_tmp_var (ptr_type_node);
9224 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
9226 tree field = TYPE_FIELDS (record_type);
9227 cnt = 0;
9228 if (cancellable)
9229 field = DECL_CHAIN (DECL_CHAIN (field));
9230 for (int pass = 0; pass < 2; pass++)
9232 tree decl, type, next;
9233 for (tree c = clauses;
9234 omp_task_reduction_iterate (pass, code, ccode,
9235 &c, &decl, &type, &next); c = next)
9237 tree var = decl, ref;
9238 if (TREE_CODE (decl) == MEM_REF)
9240 var = TREE_OPERAND (var, 0);
9241 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
9242 var = TREE_OPERAND (var, 0);
9243 tree v = var;
9244 if (TREE_CODE (var) == ADDR_EXPR)
9245 var = TREE_OPERAND (var, 0);
9246 else if (TREE_CODE (var) == INDIRECT_REF)
9247 var = TREE_OPERAND (var, 0);
9248 tree orig_var = var;
9249 if (is_variable_sized (var))
9251 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
9252 var = DECL_VALUE_EXPR (var);
9253 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
9254 var = TREE_OPERAND (var, 0);
9255 gcc_assert (DECL_P (var));
9257 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9258 if (orig_var != var)
9259 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
9260 else if (TREE_CODE (v) == ADDR_EXPR)
9261 t = build_fold_addr_expr (t);
9262 else if (TREE_CODE (v) == INDIRECT_REF)
9263 t = build_fold_indirect_ref (t);
9264 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
9266 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
9267 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
9268 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
9270 if (!integer_zerop (TREE_OPERAND (decl, 1)))
9271 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
9272 fold_convert (size_type_node,
9273 TREE_OPERAND (decl, 1)));
9275 else
9277 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9278 if (!omp_privatize_by_reference (decl))
9279 t = build_fold_addr_expr (t);
9281 t = fold_convert (pointer_sized_int_node, t);
9282 seq = NULL;
9283 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9284 gimple_seq_add_seq (start, seq);
9285 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9286 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9287 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9288 t = unshare_expr (byte_position (field));
9289 t = fold_convert (pointer_sized_int_node, t);
9290 ctx->task_reduction_map->put (c, cnt);
9291 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
9292 ? t : NULL_TREE);
9293 seq = NULL;
9294 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9295 gimple_seq_add_seq (start, seq);
9296 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9297 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
9298 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9300 tree bfield = DECL_CHAIN (field);
9301 tree cond;
9302 if (code == OMP_PARALLEL
9303 || code == OMP_FOR
9304 || code == OMP_SECTIONS
9305 || code == OMP_SCOPE)
9306 /* In parallel, worksharing or scope all threads unconditionally
9307 initialize all their task reduction private variables. */
9308 cond = boolean_true_node;
9309 else if (TREE_TYPE (ptr) == ptr_type_node)
9311 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9312 unshare_expr (byte_position (bfield)));
9313 seq = NULL;
9314 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
9315 gimple_seq_add_seq (end, seq);
9316 tree pbool = build_pointer_type (TREE_TYPE (bfield));
9317 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
9318 build_int_cst (pbool, 0));
9320 else
9321 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
9322 build_simple_mem_ref (ptr), bfield, NULL_TREE);
9323 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9324 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9325 tree condv = create_tmp_var (boolean_type_node);
9326 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
9327 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
9328 lab3, lab4);
9329 gimple_seq_add_stmt (end, g);
9330 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9331 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
9333 /* If this reduction doesn't need destruction and parallel
9334 has been cancelled, there is nothing to do for this
9335 reduction, so jump around the merge operation. */
9336 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9337 g = gimple_build_cond (NE_EXPR, cancellable,
9338 build_zero_cst (TREE_TYPE (cancellable)),
9339 lab4, lab5);
9340 gimple_seq_add_stmt (end, g);
9341 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9344 tree new_var;
9345 if (TREE_TYPE (ptr) == ptr_type_node)
9347 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9348 unshare_expr (byte_position (field)));
9349 seq = NULL;
9350 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9351 gimple_seq_add_seq (end, seq);
9352 tree pbool = build_pointer_type (TREE_TYPE (field));
9353 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9354 build_int_cst (pbool, 0));
9356 else
9357 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9358 build_simple_mem_ref (ptr), field, NULL_TREE);
9360 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9361 if (TREE_CODE (decl) != MEM_REF
9362 && omp_privatize_by_reference (decl))
9363 ref = build_simple_mem_ref (ref);
9364 /* reduction(-:var) sums up the partial results, so it acts
9365 identically to reduction(+:var). */
9366 if (rcode == MINUS_EXPR)
9367 rcode = PLUS_EXPR;
9368 if (TREE_CODE (decl) == MEM_REF)
9370 tree type = TREE_TYPE (new_var);
9371 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9372 tree i = create_tmp_var (TREE_TYPE (v));
9373 tree ptype = build_pointer_type (TREE_TYPE (type));
9374 if (DECL_P (v))
9376 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9377 tree vv = create_tmp_var (TREE_TYPE (v));
9378 gimplify_assign (vv, v, start);
9379 v = vv;
9381 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9382 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9383 new_var = build_fold_addr_expr (new_var);
9384 new_var = fold_convert (ptype, new_var);
9385 ref = fold_convert (ptype, ref);
9386 tree m = create_tmp_var (ptype);
9387 gimplify_assign (m, new_var, end);
9388 new_var = m;
9389 m = create_tmp_var (ptype);
9390 gimplify_assign (m, ref, end);
9391 ref = m;
9392 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9393 tree body = create_artificial_label (UNKNOWN_LOCATION);
9394 tree endl = create_artificial_label (UNKNOWN_LOCATION);
9395 gimple_seq_add_stmt (end, gimple_build_label (body));
9396 tree priv = build_simple_mem_ref (new_var);
9397 tree out = build_simple_mem_ref (ref);
9398 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9400 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9401 tree decl_placeholder
9402 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9403 tree lab6 = NULL_TREE;
9404 if (cancellable)
9406 /* If this reduction needs destruction and parallel
9407 has been cancelled, jump around the merge operation
9408 to the destruction. */
9409 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9410 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9411 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9412 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9413 lab6, lab5);
9414 gimple_seq_add_stmt (end, g);
9415 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9417 SET_DECL_VALUE_EXPR (placeholder, out);
9418 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9419 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9420 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9421 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9422 gimple_seq_add_seq (end,
9423 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9424 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9425 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9427 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9428 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9430 if (cancellable)
9431 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9432 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9433 if (x)
9435 gimple_seq tseq = NULL;
9436 gimplify_stmt (&x, &tseq);
9437 gimple_seq_add_seq (end, tseq);
9440 else
9442 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9443 out = unshare_expr (out);
9444 gimplify_assign (out, x, end);
9446 gimple *g
9447 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9448 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9449 gimple_seq_add_stmt (end, g);
9450 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9451 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9452 gimple_seq_add_stmt (end, g);
9453 g = gimple_build_assign (i, PLUS_EXPR, i,
9454 build_int_cst (TREE_TYPE (i), 1));
9455 gimple_seq_add_stmt (end, g);
9456 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9457 gimple_seq_add_stmt (end, g);
9458 gimple_seq_add_stmt (end, gimple_build_label (endl));
9460 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9462 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9463 tree oldv = NULL_TREE;
9464 tree lab6 = NULL_TREE;
9465 if (cancellable)
9467 /* If this reduction needs destruction and parallel
9468 has been cancelled, jump around the merge operation
9469 to the destruction. */
9470 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9471 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9472 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9473 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9474 lab6, lab5);
9475 gimple_seq_add_stmt (end, g);
9476 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9478 if (omp_privatize_by_reference (decl)
9479 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9480 TREE_TYPE (ref)))
9481 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9482 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9483 tree refv = create_tmp_var (TREE_TYPE (ref));
9484 gimplify_assign (refv, ref, end);
9485 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9486 SET_DECL_VALUE_EXPR (placeholder, ref);
9487 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9488 tree d = maybe_lookup_decl (decl, ctx);
9489 gcc_assert (d);
9490 if (DECL_HAS_VALUE_EXPR_P (d))
9491 oldv = DECL_VALUE_EXPR (d);
9492 if (omp_privatize_by_reference (var))
9494 tree v = fold_convert (TREE_TYPE (d),
9495 build_fold_addr_expr (new_var));
9496 SET_DECL_VALUE_EXPR (d, v);
9498 else
9499 SET_DECL_VALUE_EXPR (d, new_var);
9500 DECL_HAS_VALUE_EXPR_P (d) = 1;
9501 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9502 if (oldv)
9503 SET_DECL_VALUE_EXPR (d, oldv);
9504 else
9506 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9507 DECL_HAS_VALUE_EXPR_P (d) = 0;
9509 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9510 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9511 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9512 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9513 if (cancellable)
9514 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9515 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9516 if (x)
9518 gimple_seq tseq = NULL;
9519 gimplify_stmt (&x, &tseq);
9520 gimple_seq_add_seq (end, tseq);
9523 else
9525 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9526 ref = unshare_expr (ref);
9527 gimplify_assign (ref, x, end);
9529 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9530 ++cnt;
9531 field = DECL_CHAIN (bfield);
9535 if (code == OMP_TASKGROUP)
9537 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9538 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9539 gimple_seq_add_stmt (start, g);
9541 else
9543 tree c;
9544 if (code == OMP_FOR)
9545 c = gimple_omp_for_clauses (ctx->stmt);
9546 else if (code == OMP_SECTIONS)
9547 c = gimple_omp_sections_clauses (ctx->stmt);
9548 else if (code == OMP_SCOPE)
9549 c = gimple_omp_scope_clauses (ctx->stmt);
9550 else
9551 c = gimple_omp_taskreg_clauses (ctx->stmt);
9552 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9553 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9554 build_fold_addr_expr (avar));
9555 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9558 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9559 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9560 size_one_node));
9561 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9562 gimple_seq_add_stmt (end, g);
9563 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9564 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9566 enum built_in_function bfn
9567 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9568 t = builtin_decl_explicit (bfn);
9569 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9570 tree arg;
9571 if (cancellable)
9573 arg = create_tmp_var (c_bool_type);
9574 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9575 cancellable));
9577 else
9578 arg = build_int_cst (c_bool_type, 0);
9579 g = gimple_build_call (t, 1, arg);
9581 else
9583 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9584 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9586 gimple_seq_add_stmt (end, g);
9587 if (lab7)
9588 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9589 t = build_constructor (atype, NULL);
9590 TREE_THIS_VOLATILE (t) = 1;
9591 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
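/* For reference, a minimal user-level construct that exercises the task
   reduction lowering above (illustrative only; some_work is a
   hypothetical function):

     int r = 0;
     #pragma omp taskgroup task_reduction (+: r)
     {
       #pragma omp task in_reduction (+: r)
       r += some_work ();
     }

   The AVAR array initialized in START above is what gets registered
   with the runtime through GOMP_taskgroup_reduction_register.  */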
9594 /* Expand code for an OpenMP taskgroup directive. */
9596 static void
9597 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9599 gimple *stmt = gsi_stmt (*gsi_p);
9600 gcall *x;
9601 gbind *bind;
9602 gimple_seq dseq = NULL;
9603 tree block = make_node (BLOCK);
9605 bind = gimple_build_bind (NULL, NULL, block);
9606 gsi_replace (gsi_p, bind, true);
9607 gimple_bind_add_stmt (bind, stmt);
9609 push_gimplify_context ();
9611 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9613 gimple_bind_add_stmt (bind, x);
9615 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9616 gimple_omp_taskgroup_clauses (stmt),
9617 gimple_bind_body_ptr (bind), &dseq);
9619 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9620 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9621 gimple_omp_set_body (stmt, NULL);
9623 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9624 gimple_bind_add_seq (bind, dseq);
9626 pop_gimplify_context (bind);
9628 gimple_bind_append_vars (bind, ctx->block_vars);
9629 BLOCK_VARS (block) = ctx->block_vars;
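/* Schematically (illustrative summary), the bind produced above for
   "#pragma omp taskgroup" is:

     GOMP_taskgroup_start ();
     <task reduction registration, if any>
     <lowered body>
     GIMPLE_OMP_RETURN
     <task reduction finalization (DSEQ)>

   The matching GOMP_taskgroup_end call is emitted later, when the
   region is expanded.  */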
9633 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9635 static void
9636 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9637 omp_context *ctx)
9639 struct omp_for_data fd;
9640 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9641 return;
9643 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9644 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9645 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9646 if (!fd.ordered)
9647 return;
9649 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9650 tree c = gimple_omp_ordered_clauses (ord_stmt);
9651 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9652 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9654 /* Merge depend clauses from multiple adjacent
9655 #pragma omp ordered depend(sink:...) constructs
9656 into one #pragma omp ordered depend(sink:...), so that
9657 we can optimize them together. */
9658 gimple_stmt_iterator gsi = *gsi_p;
9659 gsi_next (&gsi);
9660 while (!gsi_end_p (gsi))
9662 gimple *stmt = gsi_stmt (gsi);
9663 if (is_gimple_debug (stmt)
9664 || gimple_code (stmt) == GIMPLE_NOP)
9666 gsi_next (&gsi);
9667 continue;
9669 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9670 break;
9671 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9672 c = gimple_omp_ordered_clauses (ord_stmt2);
9673 if (c == NULL_TREE
9674 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
9675 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9676 break;
9677 while (*list_p)
9678 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9679 *list_p = c;
9680 gsi_remove (&gsi, true);
9684 /* Canonicalize sink dependence clauses into one folded clause if
9685 possible.
9687 The basic algorithm is to create a sink vector whose first
9688 element is the GCD of all the first elements, and whose remaining
9689 elements are the minimum of the subsequent columns.
9691 We ignore dependence vectors whose first element is zero because
9692 such dependencies are known to be executed by the same thread.
9694 We take into account the direction of the loop, so a minimum
9695 becomes a maximum if the loop is iterating forwards. We also
9696 ignore sink clauses where the loop direction is unknown, or where
9697 the offsets are clearly invalid because they are not a multiple
9698 of the loop increment.
9700 For example:
9702 #pragma omp for ordered(2)
9703 for (i=0; i < N; ++i)
9704 for (j=0; j < M; ++j)
9706 #pragma omp ordered \
9707 depend(sink:i-8,j-2) \
9708 depend(sink:i,j-1) \ // Completely ignored because i+0.
9709 depend(sink:i-4,j-3) \
9710 depend(sink:i-6,j-4)
9711 #pragma omp ordered depend(source)
9714 Folded clause is:
9716 depend(sink:-gcd(8,4,6),-min(2,3,4))
9717 -or-
9718 depend(sink:-2,-2)
9721 /* FIXME: Computing GCDs where the first element is zero is
9722 non-trivial in the presence of collapsed loops. Do this later. */
9723 if (fd.collapse > 1)
9724 return;
9726 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9728 /* wide_int is not a POD so it must be default-constructed. */
9729 for (unsigned i = 0; i != 2 * len - 1; ++i)
9730 new (static_cast<void*>(folded_deps + i)) wide_int ();
9732 tree folded_dep = NULL_TREE;
9733 /* TRUE if the first dimension's offset is negative. */
9734 bool neg_offset_p = false;
9736 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9737 unsigned int i;
9738 while ((c = *list_p) != NULL)
9740 bool remove = false;
9742 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
9743 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9744 goto next_ordered_clause;
9746 tree vec;
9747 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9748 vec && TREE_CODE (vec) == TREE_LIST;
9749 vec = TREE_CHAIN (vec), ++i)
9751 gcc_assert (i < len);
9753 /* omp_extract_for_data has canonicalized the condition. */
9754 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9755 || fd.loops[i].cond_code == GT_EXPR);
9756 bool forward = fd.loops[i].cond_code == LT_EXPR;
9757 bool maybe_lexically_later = true;
9759 /* While the committee makes up its mind, bail if we have any
9760 non-constant steps. */
9761 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9762 goto lower_omp_ordered_ret;
9764 tree itype = TREE_TYPE (TREE_VALUE (vec));
9765 if (POINTER_TYPE_P (itype))
9766 itype = sizetype;
9767 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9768 TYPE_PRECISION (itype),
9769 TYPE_SIGN (itype));
9771 /* Ignore invalid offsets that are not multiples of the step. */
9772 if (!wi::multiple_of_p (wi::abs (offset),
9773 wi::abs (wi::to_wide (fd.loops[i].step)),
9774 UNSIGNED))
9776 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9777 "ignoring sink clause with offset that is not "
9778 "a multiple of the loop step");
9779 remove = true;
9780 goto next_ordered_clause;
9783 /* Calculate the first dimension. The first dimension of
9784 the folded dependency vector is the GCD of the first
9785 elements, while ignoring any first elements whose offset
9786 is 0. */
9787 if (i == 0)
9789 /* Ignore dependence vectors whose first dimension is 0. */
9790 if (offset == 0)
9792 remove = true;
9793 goto next_ordered_clause;
9795 else
9797 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9799 error_at (OMP_CLAUSE_LOCATION (c),
9800 "first offset must be in opposite direction "
9801 "of loop iterations");
9802 goto lower_omp_ordered_ret;
9804 if (forward)
9805 offset = -offset;
9806 neg_offset_p = forward;
9807 /* Initialize the first time around. */
9808 if (folded_dep == NULL_TREE)
9810 folded_dep = c;
9811 folded_deps[0] = offset;
9813 else
9814 folded_deps[0] = wi::gcd (folded_deps[0],
9815 offset, UNSIGNED);
9818 /* Calculate minimum for the remaining dimensions. */
9819 else
9821 folded_deps[len + i - 1] = offset;
9822 if (folded_dep == c)
9823 folded_deps[i] = offset;
9824 else if (maybe_lexically_later
9825 && !wi::eq_p (folded_deps[i], offset))
9827 if (forward ^ wi::gts_p (folded_deps[i], offset))
9829 unsigned int j;
9830 folded_dep = c;
9831 for (j = 1; j <= i; j++)
9832 folded_deps[j] = folded_deps[len + j - 1];
9834 else
9835 maybe_lexically_later = false;
9839 gcc_assert (i == len);
9841 remove = true;
9843 next_ordered_clause:
9844 if (remove)
9845 *list_p = OMP_CLAUSE_CHAIN (c);
9846 else
9847 list_p = &OMP_CLAUSE_CHAIN (c);
9850 if (folded_dep)
9852 if (neg_offset_p)
9853 folded_deps[0] = -folded_deps[0];
9855 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9856 if (POINTER_TYPE_P (itype))
9857 itype = sizetype;
9859 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9860 = wide_int_to_tree (itype, folded_deps[0]);
9861 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9862 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9865 lower_omp_ordered_ret:
9867 /* Ordered without clauses is #pragma omp ordered threads, while we want
9868 a nop instead if we remove all clauses. */
9869 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9870 gsi_replace (gsi_p, gimple_build_nop (), true);
9874 /* Expand code for an OpenMP ordered directive. */
9876 static void
9877 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9879 tree block;
9880 gimple *stmt = gsi_stmt (*gsi_p), *g;
9881 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9882 gcall *x;
9883 gbind *bind;
9884 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9885 OMP_CLAUSE_SIMD);
9886 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9887 loop. */
9888 bool maybe_simt
9889 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9890 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9891 OMP_CLAUSE_THREADS);
9893 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9894 OMP_CLAUSE_DEPEND))
9896 /* FIXME: This needs to be moved to the expansion to verify various
9897 conditions only testable on cfg with dominators computed, and also
9898 all the depend clauses to be merged still might need to be available
9899 for the runtime checks. */
9900 if (0)
9901 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9902 return;
9905 push_gimplify_context ();
9907 block = make_node (BLOCK);
9908 bind = gimple_build_bind (NULL, NULL, block);
9909 gsi_replace (gsi_p, bind, true);
9910 gimple_bind_add_stmt (bind, stmt);
9912 if (simd)
9914 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9915 build_int_cst (NULL_TREE, threads));
9916 cfun->has_simduid_loops = true;
9918 else
9919 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9921 gimple_bind_add_stmt (bind, x);
9923 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9924 if (maybe_simt)
9926 counter = create_tmp_var (integer_type_node);
9927 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9928 gimple_call_set_lhs (g, counter);
9929 gimple_bind_add_stmt (bind, g);
9931 body = create_artificial_label (UNKNOWN_LOCATION);
9932 test = create_artificial_label (UNKNOWN_LOCATION);
9933 gimple_bind_add_stmt (bind, gimple_build_label (body));
9935 tree simt_pred = create_tmp_var (integer_type_node);
9936 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9937 gimple_call_set_lhs (g, simt_pred);
9938 gimple_bind_add_stmt (bind, g);
9940 tree t = create_artificial_label (UNKNOWN_LOCATION);
9941 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9942 gimple_bind_add_stmt (bind, g);
9944 gimple_bind_add_stmt (bind, gimple_build_label (t));
9946 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9947 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9948 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9949 gimple_omp_set_body (stmt, NULL);
9951 if (maybe_simt)
9953 gimple_bind_add_stmt (bind, gimple_build_label (test));
9954 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9955 gimple_bind_add_stmt (bind, g);
9957 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9958 tree nonneg = create_tmp_var (integer_type_node);
9959 gimple_seq tseq = NULL;
9960 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9961 gimple_bind_add_seq (bind, tseq);
9963 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9964 gimple_call_set_lhs (g, nonneg);
9965 gimple_bind_add_stmt (bind, g);
9967 tree end = create_artificial_label (UNKNOWN_LOCATION);
9968 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9969 gimple_bind_add_stmt (bind, g);
9971 gimple_bind_add_stmt (bind, gimple_build_label (end));
9973 if (simd)
9974 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9975 build_int_cst (NULL_TREE, threads));
9976 else
9977 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9979 gimple_bind_add_stmt (bind, x);
9981 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9983 pop_gimplify_context (bind);
9985 gimple_bind_append_vars (bind, ctx->block_vars);
9986 BLOCK_VARS (block) = gimple_bind_vars (bind);
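/* Schematically (illustrative only), a non-simd "#pragma omp ordered"
   body becomes:

     GOMP_ordered_start ();
     <lowered body>
     GOMP_ordered_end ();

   In simd context the IFN_GOMP_SIMD_ORDERED_START/END internal calls
   bracket the body instead, and for SIMT an additional loop over the
   lanes, guarded by IFN_GOMP_SIMT_ORDERED_PRED, serializes them.  */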
9990 /* Expand code for an OpenMP scan directive and the structured block
9991 before the scan directive. */
9993 static void
9994 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9996 gimple *stmt = gsi_stmt (*gsi_p);
9997 bool has_clauses
9998 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9999 tree lane = NULL_TREE;
10000 gimple_seq before = NULL;
10001 omp_context *octx = ctx->outer;
10002 gcc_assert (octx);
10003 if (octx->scan_exclusive && !has_clauses)
10005 gimple_stmt_iterator gsi2 = *gsi_p;
10006 gsi_next (&gsi2);
10007 gimple *stmt2 = gsi_stmt (gsi2);
10008 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
10009 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
10010 the one with exclusive clause(s), comes first. */
10011 if (stmt2
10012 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
10013 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
10015 gsi_remove (gsi_p, false);
10016 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
10017 ctx = maybe_lookup_ctx (stmt2);
10018 gcc_assert (ctx);
10019 lower_omp_scan (gsi_p, ctx);
10020 return;
10024 bool input_phase = has_clauses ^ octx->scan_inclusive;
10025 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10026 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
10027 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10028 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
10029 && !gimple_omp_for_combined_p (octx->stmt));
10030 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
10031 if (is_for_simd && octx->for_simd_scan_phase)
10032 is_simd = false;
10033 if (is_simd)
10034 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
10035 OMP_CLAUSE__SIMDUID_))
10037 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
10038 lane = create_tmp_var (unsigned_type_node);
10039 tree t = build_int_cst (integer_type_node,
10040 input_phase ? 1
10041 : octx->scan_inclusive ? 2 : 3);
10042 gimple *g
10043 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
10044 gimple_call_set_lhs (g, lane);
10045 gimple_seq_add_stmt (&before, g);
10048 if (is_simd || is_for)
10050 for (tree c = gimple_omp_for_clauses (octx->stmt);
10051 c; c = OMP_CLAUSE_CHAIN (c))
10052 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10053 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10055 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10056 tree var = OMP_CLAUSE_DECL (c);
10057 tree new_var = lookup_decl (var, octx);
10058 tree val = new_var;
10059 tree var2 = NULL_TREE;
10060 tree var3 = NULL_TREE;
10061 tree var4 = NULL_TREE;
10062 tree lane0 = NULL_TREE;
10063 tree new_vard = new_var;
10064 if (omp_privatize_by_reference (var))
10066 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10067 val = new_var;
10069 if (DECL_HAS_VALUE_EXPR_P (new_vard))
10071 val = DECL_VALUE_EXPR (new_vard);
10072 if (new_vard != new_var)
10074 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
10075 val = TREE_OPERAND (val, 0);
10077 if (TREE_CODE (val) == ARRAY_REF
10078 && VAR_P (TREE_OPERAND (val, 0)))
10080 tree v = TREE_OPERAND (val, 0);
10081 if (lookup_attribute ("omp simd array",
10082 DECL_ATTRIBUTES (v)))
10084 val = unshare_expr (val);
10085 lane0 = TREE_OPERAND (val, 1);
10086 TREE_OPERAND (val, 1) = lane;
10087 var2 = lookup_decl (v, octx);
10088 if (octx->scan_exclusive)
10089 var4 = lookup_decl (var2, octx);
10090 if (input_phase
10091 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10092 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
10093 if (!input_phase)
10095 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
10096 var2, lane, NULL_TREE, NULL_TREE);
10097 TREE_THIS_NOTRAP (var2) = 1;
10098 if (octx->scan_exclusive)
10100 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
10101 var4, lane, NULL_TREE,
10102 NULL_TREE);
10103 TREE_THIS_NOTRAP (var4) = 1;
10106 else
10107 var2 = val;
10110 gcc_assert (var2);
10112 else
10114 var2 = build_outer_var_ref (var, octx);
10115 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10117 var3 = maybe_lookup_decl (new_vard, octx);
10118 if (var3 == new_vard || var3 == NULL_TREE)
10119 var3 = NULL_TREE;
10120 else if (is_simd && octx->scan_exclusive && !input_phase)
10122 var4 = maybe_lookup_decl (var3, octx);
10123 if (var4 == var3 || var4 == NULL_TREE)
10125 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
10127 var4 = var3;
10128 var3 = NULL_TREE;
10130 else
10131 var4 = NULL_TREE;
10135 if (is_simd
10136 && octx->scan_exclusive
10137 && !input_phase
10138 && var4 == NULL_TREE)
10139 var4 = create_tmp_var (TREE_TYPE (val));
10141 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10143 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10144 if (input_phase)
10146 if (var3)
10148 /* If we've added a separate identity element
10149 variable, copy it over into val. */
10150 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
10151 var3);
10152 gimplify_and_add (x, &before);
10154 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10156 /* Otherwise, assign to it the identity element. */
10157 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10158 if (is_for)
10159 tseq = copy_gimple_seq_and_replace_locals (tseq);
10160 tree ref = build_outer_var_ref (var, octx);
10161 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10162 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10163 if (x)
10165 if (new_vard != new_var)
10166 val = build_fold_addr_expr_loc (clause_loc, val);
10167 SET_DECL_VALUE_EXPR (new_vard, val);
10169 SET_DECL_VALUE_EXPR (placeholder, ref);
10170 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10171 lower_omp (&tseq, octx);
10172 if (x)
10173 SET_DECL_VALUE_EXPR (new_vard, x);
10174 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10175 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10176 gimple_seq_add_seq (&before, tseq);
10177 if (is_simd)
10178 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10181 else if (is_simd)
10183 tree x;
10184 if (octx->scan_exclusive)
10186 tree v4 = unshare_expr (var4);
10187 tree v2 = unshare_expr (var2);
10188 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
10189 gimplify_and_add (x, &before);
10191 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10192 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10193 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10194 tree vexpr = val;
10195 if (x && new_vard != new_var)
10196 vexpr = build_fold_addr_expr_loc (clause_loc, val);
10197 if (x)
10198 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10199 SET_DECL_VALUE_EXPR (placeholder, var2);
10200 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10201 lower_omp (&tseq, octx);
10202 gimple_seq_add_seq (&before, tseq);
10203 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10204 if (x)
10205 SET_DECL_VALUE_EXPR (new_vard, x);
10206 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10207 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10208 if (octx->scan_inclusive)
10210 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10211 var2);
10212 gimplify_and_add (x, &before);
10214 else if (lane0 == NULL_TREE)
10216 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10217 var4);
10218 gimplify_and_add (x, &before);
10222 else
10224 if (input_phase)
10226 /* input phase. Set val to initializer before
10227 the body. */
10228 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
10229 gimplify_assign (val, x, &before);
10231 else if (is_simd)
10233 /* scan phase. */
10234 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10235 if (code == MINUS_EXPR)
10236 code = PLUS_EXPR;
10238 tree x = build2 (code, TREE_TYPE (var2),
10239 unshare_expr (var2), unshare_expr (val));
10240 if (octx->scan_inclusive)
10242 gimplify_assign (unshare_expr (var2), x, &before);
10243 gimplify_assign (val, var2, &before);
10245 else
10247 gimplify_assign (unshare_expr (var4),
10248 unshare_expr (var2), &before);
10249 gimplify_assign (var2, x, &before);
10250 if (lane0 == NULL_TREE)
10251 gimplify_assign (val, var4, &before);
10255 if (octx->scan_exclusive && !input_phase && lane0)
10257 tree vexpr = unshare_expr (var4);
10258 TREE_OPERAND (vexpr, 1) = lane0;
10259 if (new_vard != new_var)
10260 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
10261 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10265 if (is_simd && !is_for_simd)
10267 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
10268 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
10269 gsi_replace (gsi_p, gimple_build_nop (), true);
10270 return;
10272 lower_omp (gimple_omp_body_ptr (stmt), octx);
10273 if (before)
10275 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
10276 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
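/* For reference, an inscan reduction handled above looks like this
   (illustrative only; a, b, n and r are hypothetical user variables):

     #pragma omp simd reduction (inscan, +: r)
     for (i = 0; i < n; i++)
       {
         r += a[i];                      // input phase
         #pragma omp scan inclusive (r)
         b[i] = r;                       // scan phase
       }
*/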
10281 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10282 substitution of a couple of function calls. But in the NAMED case, it
10283 requires that the languages coordinate on a symbol name. It is therefore
10284 best put here in common code. */
10286 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
10288 static void
10289 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10291 tree block;
10292 tree name, lock, unlock;
10293 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10294 gbind *bind;
10295 location_t loc = gimple_location (stmt);
10296 gimple_seq tbody;
10298 name = gimple_omp_critical_name (stmt);
10299 if (name)
10301 tree decl;
10303 if (!critical_name_mutexes)
10304 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10306 tree *n = critical_name_mutexes->get (name);
10307 if (n == NULL)
10309 char *new_str;
10311 decl = create_tmp_var_raw (ptr_type_node);
10313 new_str = ACONCAT ((".gomp_critical_user_",
10314 IDENTIFIER_POINTER (name), NULL));
10315 DECL_NAME (decl) = get_identifier (new_str);
10316 TREE_PUBLIC (decl) = 1;
10317 TREE_STATIC (decl) = 1;
10318 DECL_COMMON (decl) = 1;
10319 DECL_ARTIFICIAL (decl) = 1;
10320 DECL_IGNORED_P (decl) = 1;
10322 varpool_node::finalize_decl (decl);
10324 critical_name_mutexes->put (name, decl);
10326 else
10327 decl = *n;
10329 /* If '#pragma omp critical' is inside an offloaded region or
10330 inside a function marked as offloadable, the symbol must be
10331 marked as offloadable too. */
10332 omp_context *octx;
10333 if (cgraph_node::get (current_function_decl)->offloadable)
10334 varpool_node::get_create (decl)->offloadable = 1;
10335 else
10336 for (octx = ctx->outer; octx; octx = octx->outer)
10337 if (is_gimple_omp_offloaded (octx->stmt))
10339 varpool_node::get_create (decl)->offloadable = 1;
10340 break;
10343 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10344 lock = build_call_expr_loc (loc, lock, 1,
10345 build_fold_addr_expr_loc (loc, decl));
10347 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10348 unlock = build_call_expr_loc (loc, unlock, 1,
10349 build_fold_addr_expr_loc (loc, decl));
10351 else
10353 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10354 lock = build_call_expr_loc (loc, lock, 0);
10356 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10357 unlock = build_call_expr_loc (loc, unlock, 0);
10360 push_gimplify_context ();
10362 block = make_node (BLOCK);
10363 bind = gimple_build_bind (NULL, NULL, block);
10364 gsi_replace (gsi_p, bind, true);
10365 gimple_bind_add_stmt (bind, stmt);
10367 tbody = gimple_bind_body (bind);
10368 gimplify_and_add (lock, &tbody);
10369 gimple_bind_set_body (bind, tbody);
10371 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10372 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10373 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10374 gimple_omp_set_body (stmt, NULL);
10376 tbody = gimple_bind_body (bind);
10377 gimplify_and_add (unlock, &tbody);
10378 gimple_bind_set_body (bind, tbody);
10380 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10382 pop_gimplify_context (bind);
10383 gimple_bind_append_vars (bind, ctx->block_vars);
10384 BLOCK_VARS (block) = gimple_bind_vars (bind);
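/* E.g. (illustrative only; my_lock and x are hypothetical), for

     #pragma omp critical (my_lock)
       x++;

   the lowering above yields, schematically:

     GOMP_critical_name_start (&.gomp_critical_user_my_lock);
     x++;
     GOMP_critical_name_end (&.gomp_critical_user_my_lock);

   while the unnamed form uses GOMP_critical_start / GOMP_critical_end.  */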
10387 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10388 for a lastprivate clause. Given a loop control predicate of (V
10389 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10390 is appended to *DLIST, iterator initialization is appended to
10391 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10392 to be emitted in a critical section. */
10394 static void
10395 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10396 gimple_seq *dlist, gimple_seq *clist,
10397 struct omp_context *ctx)
10399 tree clauses, cond, vinit;
10400 enum tree_code cond_code;
10401 gimple_seq stmts;
10403 cond_code = fd->loop.cond_code;
10404 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10406 /* When possible, use a strict equality expression. This can let VRP
10407 type optimizations deduce the value and remove a copy. */
10408 if (tree_fits_shwi_p (fd->loop.step))
10410 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10411 if (step == 1 || step == -1)
10412 cond_code = EQ_EXPR;
10415 tree n2 = fd->loop.n2;
10416 if (fd->collapse > 1
10417 && TREE_CODE (n2) != INTEGER_CST
10418 && gimple_omp_for_combined_into_p (fd->for_stmt))
10420 struct omp_context *taskreg_ctx = NULL;
10421 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10423 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10424 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10425 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10427 if (gimple_omp_for_combined_into_p (gfor))
10429 gcc_assert (ctx->outer->outer
10430 && is_parallel_ctx (ctx->outer->outer));
10431 taskreg_ctx = ctx->outer->outer;
10433 else
10435 struct omp_for_data outer_fd;
10436 omp_extract_for_data (gfor, &outer_fd, NULL);
10437 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10440 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10441 taskreg_ctx = ctx->outer->outer;
10443 else if (is_taskreg_ctx (ctx->outer))
10444 taskreg_ctx = ctx->outer;
10445 if (taskreg_ctx)
10447 int i;
10448 tree taskreg_clauses
10449 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10450 tree innerc = omp_find_clause (taskreg_clauses,
10451 OMP_CLAUSE__LOOPTEMP_);
10452 gcc_assert (innerc);
10453 int count = fd->collapse;
10454 if (fd->non_rect
10455 && fd->last_nonrect == fd->first_nonrect + 1)
10456 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10457 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10458 count += 4;
10459 for (i = 0; i < count; i++)
10461 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10462 OMP_CLAUSE__LOOPTEMP_);
10463 gcc_assert (innerc);
10465 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10466 OMP_CLAUSE__LOOPTEMP_);
10467 if (innerc)
10468 n2 = fold_convert (TREE_TYPE (n2),
10469 lookup_decl (OMP_CLAUSE_DECL (innerc),
10470 taskreg_ctx));
10473 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10475 clauses = gimple_omp_for_clauses (fd->for_stmt);
10476 stmts = NULL;
10477 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
10478 if (!gimple_seq_empty_p (stmts))
10480 gimple_seq_add_seq (&stmts, *dlist);
10481 *dlist = stmts;
10483 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10484 vinit = fd->loop.n1;
10485 if (cond_code == EQ_EXPR
10486 && tree_fits_shwi_p (fd->loop.n2)
10487 && ! integer_zerop (fd->loop.n2))
10488 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10489 else
10490 vinit = unshare_expr (vinit);
10492 /* Initialize the iterator variable, so that threads that don't execute
10493 any iterations don't execute the lastprivate clauses by accident. */
10494 gimplify_assign (fd->loop.v, vinit, body_p);
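/* Illustratively, for "#pragma omp for lastprivate (x)" over
   for (v = N1; v < N2; v += STEP), the copy-out appended to *DLIST is
   guarded roughly as:

     if (v >= N2)   // or v == N2 when |STEP| == 1
       x = x_private;

   so only the thread that executed the last iteration performs it, and
   V is pre-initialized above so that threads with zero iterations
   cannot run it by accident.  */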
10498 /* OpenACC privatization.
10500 Or, in other words, *sharing* at the respective OpenACC level of
10501 parallelism.
10503 From a correctness perspective, a non-addressable variable can't be accessed
10504 outside the current thread, so it can go in a (faster than shared memory)
10505 register -- though that register may need to be broadcast in some
10506 circumstances. A variable can only meaningfully be "shared" across workers
10507 or vector lanes if its address is taken, e.g. by a call to an atomic
10508 builtin.
10510 From an optimisation perspective, the answer might be fuzzier: maybe
10511 sometimes, using shared memory directly would be faster than
10512 broadcasting. */
10514 static void
10515 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10516 const location_t loc, const tree c,
10517 const tree decl)
10519 const dump_user_location_t d_u_loc
10520 = dump_user_location_t::from_location_t (loc);
10521 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10522 #if __GNUC__ >= 10
10523 # pragma GCC diagnostic push
10524 # pragma GCC diagnostic ignored "-Wformat"
10525 #endif
10526 dump_printf_loc (l_dump_flags, d_u_loc,
10527 "variable %<%T%> ", decl);
10528 #if __GNUC__ >= 10
10529 # pragma GCC diagnostic pop
10530 #endif
10531 if (c)
10532 dump_printf (l_dump_flags,
10533 "in %qs clause ",
10534 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10535 else
10536 dump_printf (l_dump_flags,
10537 "declared in block ");
10540 static bool
10541 oacc_privatization_candidate_p (const location_t loc, const tree c,
10542 const tree decl)
10544 dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10546 /* There is some differentiation depending on block vs. clause. */
10547 bool block = !c;
10549 bool res = true;
10551 if (res && !VAR_P (decl))
10553 res = false;
10555 if (dump_enabled_p ())
10557 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10558 dump_printf (l_dump_flags,
10559 "potentially has improper OpenACC privatization level: %qs\n",
10560 get_tree_code_name (TREE_CODE (decl)));
10564 if (res && block && TREE_STATIC (decl))
10566 res = false;
10568 if (dump_enabled_p ())
10570 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10571 dump_printf (l_dump_flags,
10572 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10573 "static");
10577 if (res && block && DECL_EXTERNAL (decl))
10579 res = false;
10581 if (dump_enabled_p ())
10583 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10584 dump_printf (l_dump_flags,
10585 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10586 "external");
10590 if (res && !TREE_ADDRESSABLE (decl))
10592 res = false;
10594 if (dump_enabled_p ())
10596 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10597 dump_printf (l_dump_flags,
10598 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10599 "not addressable");
10603 if (res)
10605 if (dump_enabled_p ())
10607 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10608 dump_printf (l_dump_flags,
10609 "is candidate for adjusting OpenACC privatization level\n");
10613 if (dump_file && (dump_flags & TDF_DETAILS))
10615 print_generic_decl (dump_file, decl, dump_flags);
10616 fprintf (dump_file, "\n");
10619 return res;
10622 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10623 CTX. */
10625 static void
10626 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10628 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10629 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10631 tree decl = OMP_CLAUSE_DECL (c);
10633 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c, decl))
10634 continue;
10636 gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10637 ctx->oacc_privatization_candidates.safe_push (decl);
10641 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10642 CTX. */
10644 static void
10645 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10647 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10649 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL, decl))
10650 continue;
10652 gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10653 ctx->oacc_privatization_candidates.safe_push (decl);
10657 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10659 static tree
10660 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10661 struct walk_stmt_info *wi)
10663 gimple *stmt = gsi_stmt (*gsi_p);
10665 *handled_ops_p = true;
10666 switch (gimple_code (stmt))
10668 WALK_SUBSTMTS;
10670 case GIMPLE_OMP_FOR:
10671 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10672 && gimple_omp_for_combined_into_p (stmt))
10673 *handled_ops_p = false;
10674 break;
10676 case GIMPLE_OMP_SCAN:
10677 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10678 return integer_zero_node;
10679 default:
10680 break;
10682 return NULL;
10685 /* Helper function for lower_omp_for; adds transformations for a worksharing
10686 loop with scan directives inside of it.
10687 For a worksharing loop not combined with simd, transform:
10688 #pragma omp for reduction(inscan,+:r) private(i)
10689 for (i = 0; i < n; i = i + 1)
10692 update (r);
10694 #pragma omp scan inclusive(r)
10696 use (r);
10700 into two worksharing loops + code to merge results:
10702 num_threads = omp_get_num_threads ();
10703 thread_num = omp_get_thread_num ();
10704 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10705 <D.2099>:
10706 var2 = r;
10707 goto <D.2101>;
10708 <D.2100>:
10709 // For UDRs this is UDR init, or if ctors are needed, copy from
10710 // var3 that has been constructed to contain the neutral element.
10711 var2 = 0;
10712 <D.2101>:
10713 ivar = 0;
10714 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10715 // a shared array with num_threads elements and rprivb to a local array
10716 // with a number of elements equal to the number of (contiguous) iterations the
10717 // current thread will perform. controlb and controlp variables are
10718 // temporaries to handle deallocation of rprivb at the end of second
10719 // GOMP_FOR.
10720 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10721 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10722 for (i = 0; i < n; i = i + 1)
10725 // For UDRs this is UDR init or copy from var3.
10726 r = 0;
10727 // This is the input phase from user code.
10728 update (r);
10731 // For UDRs this is UDR merge.
10732 var2 = var2 + r;
10733 // Rather than handing it over to the user, save to local thread's
10734 // array.
10735 rprivb[ivar] = var2;
10736 // For exclusive scan, the above two statements are swapped.
10737 ivar = ivar + 1;
10740 // And remember the final value from this thread into the shared
10741 // rpriva array.
10742 rpriva[(sizetype) thread_num] = var2;
10743 // If more than one thread, compute the inclusive parallel scan of the
10744 // rpriva array using a work-efficient prefix sum.
10745 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10746 <D.2102>:
10747 GOMP_barrier ();
10748 down = 0;
10749 k = 1;
10750 num_threadsu = (unsigned int) num_threads;
10751 thread_numup1 = (unsigned int) thread_num + 1;
10752 <D.2108>:
10753 twok = k << 1;
10754 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10755 <D.2110>:
10756 down = 4294967295;
10757 k = k >> 1;
10758 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10759 <D.2112>:
10760 k = k >> 1;
10761 <D.2111>:
10762 twok = k << 1;
10763 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10764 mul = REALPART_EXPR <cplx>;
10765 ovf = IMAGPART_EXPR <cplx>;
10766 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10767 <D.2116>:
10768 andv = k & down;
10769 andvm1 = andv + 4294967295;
10770 l = mul + andvm1;
10771 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10772 <D.2120>:
10773 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10774 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10775 rpriva[l] = rpriva[l - k] + rpriva[l];
10776 <D.2117>:
10777 if (down == 0) goto <D.2121>; else goto <D.2122>;
10778 <D.2121>:
10779 k = k << 1;
10780 goto <D.2123>;
10781 <D.2122>:
10782 k = k >> 1;
10783 <D.2123>:
10784 GOMP_barrier ();
10785 if (k != 0) goto <D.2108>; else goto <D.2103>;
10786 <D.2103>:
10787 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10788 <D.2124>:
10789 // For UDRs this is UDR init or copy from var3.
10790 var2 = 0;
10791 goto <D.2126>;
10792 <D.2125>:
10793 var2 = rpriva[thread_num - 1];
10794 <D.2126>:
10795 ivar = 0;
10796 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10797 reduction(inscan,+:r) private(i)
10798 for (i = 0; i < n; i = i + 1)
10801 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10802 r = var2 + rprivb[ivar];
10805 // This is the scan phase from user code.
10806 use (r);
10807 // Plus a bump of the iterator.
10808 ivar = ivar + 1;
10810 } */
10812 static void
10813 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10814 struct omp_for_data *fd, omp_context *ctx)
10816 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10817 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10819 gimple_seq body = gimple_omp_body (stmt);
10820 gimple_stmt_iterator input1_gsi = gsi_none ();
10821 struct walk_stmt_info wi;
10822 memset (&wi, 0, sizeof (wi));
10823 wi.val_only = true;
10824 wi.info = (void *) &input1_gsi;
10825 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10826 gcc_assert (!gsi_end_p (input1_gsi));
10828 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10829 gimple_stmt_iterator gsi = input1_gsi;
10830 gsi_next (&gsi);
10831 gimple_stmt_iterator scan1_gsi = gsi;
10832 gimple *scan_stmt1 = gsi_stmt (gsi);
10833 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10835 gimple_seq input_body = gimple_omp_body (input_stmt1);
10836 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10837 gimple_omp_set_body (input_stmt1, NULL);
10838 gimple_omp_set_body (scan_stmt1, NULL);
10839 gimple_omp_set_body (stmt, NULL);
10841 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10842 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10843 gimple_omp_set_body (stmt, body);
10844 gimple_omp_set_body (input_stmt1, input_body);
10846 gimple_stmt_iterator input2_gsi = gsi_none ();
10847 memset (&wi, 0, sizeof (wi));
10848 wi.val_only = true;
10849 wi.info = (void *) &input2_gsi;
10850 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10851 gcc_assert (!gsi_end_p (input2_gsi));
10853 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10854 gsi = input2_gsi;
10855 gsi_next (&gsi);
10856 gimple_stmt_iterator scan2_gsi = gsi;
10857 gimple *scan_stmt2 = gsi_stmt (gsi);
10858 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10859 gimple_omp_set_body (scan_stmt2, scan_body);
10861 gimple_stmt_iterator input3_gsi = gsi_none ();
10862 gimple_stmt_iterator scan3_gsi = gsi_none ();
10863 gimple_stmt_iterator input4_gsi = gsi_none ();
10864 gimple_stmt_iterator scan4_gsi = gsi_none ();
10865 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10866 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10867 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10868 if (is_for_simd)
10870 memset (&wi, 0, sizeof (wi));
10871 wi.val_only = true;
10872 wi.info = (void *) &input3_gsi;
10873 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10874 gcc_assert (!gsi_end_p (input3_gsi));
10876 input_stmt3 = gsi_stmt (input3_gsi);
10877 gsi = input3_gsi;
10878 gsi_next (&gsi);
10879 scan3_gsi = gsi;
10880 scan_stmt3 = gsi_stmt (gsi);
10881 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10883 memset (&wi, 0, sizeof (wi));
10884 wi.val_only = true;
10885 wi.info = (void *) &input4_gsi;
10886 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10887 gcc_assert (!gsi_end_p (input4_gsi));
10889 input_stmt4 = gsi_stmt (input4_gsi);
10890 gsi = input4_gsi;
10891 gsi_next (&gsi);
10892 scan4_gsi = gsi;
10893 scan_stmt4 = gsi_stmt (gsi);
10894 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10896 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10897 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
10900 tree num_threads = create_tmp_var (integer_type_node);
10901 tree thread_num = create_tmp_var (integer_type_node);
10902 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10903 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10904 gimple *g = gimple_build_call (nthreads_decl, 0);
10905 gimple_call_set_lhs (g, num_threads);
10906 gimple_seq_add_stmt (body_p, g);
10907 g = gimple_build_call (threadnum_decl, 0);
10908 gimple_call_set_lhs (g, thread_num);
10909 gimple_seq_add_stmt (body_p, g);
10911 tree ivar = create_tmp_var (sizetype);
10912 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
10913 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
10914 tree k = create_tmp_var (unsigned_type_node);
10915 tree l = create_tmp_var (unsigned_type_node);
10917 gimple_seq clist = NULL, mdlist = NULL;
10918 gimple_seq thr01_list = NULL, thrn1_list = NULL;
10919 gimple_seq thr02_list = NULL, thrn2_list = NULL;
10920 gimple_seq scan1_list = NULL, input2_list = NULL;
10921 gimple_seq last_list = NULL, reduc_list = NULL;
10922 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10923 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10924 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10926 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10927 tree var = OMP_CLAUSE_DECL (c);
10928 tree new_var = lookup_decl (var, ctx);
10929 tree var3 = NULL_TREE;
10930 tree new_vard = new_var;
10931 if (omp_privatize_by_reference (var))
10932 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10933 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10935 var3 = maybe_lookup_decl (new_vard, ctx);
10936 if (var3 == new_vard)
10937 var3 = NULL_TREE;
10940 tree ptype = build_pointer_type (TREE_TYPE (new_var));
10941 tree rpriva = create_tmp_var (ptype);
10942 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10943 OMP_CLAUSE_DECL (nc) = rpriva;
10944 *cp1 = nc;
10945 cp1 = &OMP_CLAUSE_CHAIN (nc);
10947 tree rprivb = create_tmp_var (ptype);
10948 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10949 OMP_CLAUSE_DECL (nc) = rprivb;
10950 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
10951 *cp1 = nc;
10952 cp1 = &OMP_CLAUSE_CHAIN (nc);
10954 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
10955 if (new_vard != new_var)
10956 TREE_ADDRESSABLE (var2) = 1;
10957 gimple_add_tmp_var (var2);
10959 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
10960 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10961 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10962 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10963 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
10965 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
10966 thread_num, integer_minus_one_node);
10967 x = fold_convert_loc (clause_loc, sizetype, x);
10968 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10969 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10970 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10971 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
10973 x = fold_convert_loc (clause_loc, sizetype, l);
10974 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10975 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10976 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10977 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
10979 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
10980 x = fold_convert_loc (clause_loc, sizetype, x);
10981 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10982 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10983 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10984 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
10986 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
10987 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10988 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
10989 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
10991 tree var4 = is_for_simd ? new_var : var2;
10992 tree var5 = NULL_TREE, var6 = NULL_TREE;
10993 if (is_for_simd)
10995 var5 = lookup_decl (var, input_simd_ctx);
10996 var6 = lookup_decl (var, scan_simd_ctx);
10997 if (new_vard != new_var)
10999 var5 = build_simple_mem_ref_loc (clause_loc, var5);
11000 var6 = build_simple_mem_ref_loc (clause_loc, var6);
11003 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11005 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
11006 tree val = var2;
11008 x = lang_hooks.decls.omp_clause_default_ctor
11009 (c, var2, build_outer_var_ref (var, ctx));
11010 if (x)
11011 gimplify_and_add (x, &clist);
11013 x = build_outer_var_ref (var, ctx);
11014 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
11016 gimplify_and_add (x, &thr01_list);
11018 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
11019 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
11020 if (var3)
11022 x = unshare_expr (var4);
11023 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11024 gimplify_and_add (x, &thrn1_list);
11025 x = unshare_expr (var4);
11026 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11027 gimplify_and_add (x, &thr02_list);
11029 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
11031 /* Otherwise, assign to it the identity element. */
11032 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11033 tseq = copy_gimple_seq_and_replace_locals (tseq);
11034 if (!is_for_simd)
11036 if (new_vard != new_var)
11037 val = build_fold_addr_expr_loc (clause_loc, val);
11038 SET_DECL_VALUE_EXPR (new_vard, val);
11039 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11041 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
11042 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11043 lower_omp (&tseq, ctx);
11044 gimple_seq_add_seq (&thrn1_list, tseq);
11045 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11046 lower_omp (&tseq, ctx);
11047 gimple_seq_add_seq (&thr02_list, tseq);
11048 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11049 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11050 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11051 if (y)
11052 SET_DECL_VALUE_EXPR (new_vard, y);
11053 else
11055 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11056 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11060 x = unshare_expr (var4);
11061 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
11062 gimplify_and_add (x, &thrn2_list);
11064 if (is_for_simd)
11066 x = unshare_expr (rprivb_ref);
11067 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
11068 gimplify_and_add (x, &scan1_list);
11070 else
11072 if (ctx->scan_exclusive)
11074 x = unshare_expr (rprivb_ref);
11075 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11076 gimplify_and_add (x, &scan1_list);
11079 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11080 tseq = copy_gimple_seq_and_replace_locals (tseq);
11081 SET_DECL_VALUE_EXPR (placeholder, var2);
11082 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11083 lower_omp (&tseq, ctx);
11084 gimple_seq_add_seq (&scan1_list, tseq);
11086 if (ctx->scan_inclusive)
11088 x = unshare_expr (rprivb_ref);
11089 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11090 gimplify_and_add (x, &scan1_list);
11094 x = unshare_expr (rpriva_ref);
11095 x = lang_hooks.decls.omp_clause_assign_op (c, x,
11096 unshare_expr (var4));
11097 gimplify_and_add (x, &mdlist);
11099 x = unshare_expr (is_for_simd ? var6 : new_var);
11100 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
11101 gimplify_and_add (x, &input2_list);
11103 val = rprivb_ref;
11104 if (new_vard != new_var)
11105 val = build_fold_addr_expr_loc (clause_loc, val);
11107 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11108 tseq = copy_gimple_seq_and_replace_locals (tseq);
11109 SET_DECL_VALUE_EXPR (new_vard, val);
11110 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11111 if (is_for_simd)
11113 SET_DECL_VALUE_EXPR (placeholder, var6);
11114 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11116 else
11117 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11118 lower_omp (&tseq, ctx);
11119 if (y)
11120 SET_DECL_VALUE_EXPR (new_vard, y);
11121 else
11123 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11124 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11126 if (!is_for_simd)
11128 SET_DECL_VALUE_EXPR (placeholder, new_var);
11129 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11130 lower_omp (&tseq, ctx);
11132 gimple_seq_add_seq (&input2_list, tseq);
11134 x = build_outer_var_ref (var, ctx);
11135 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
11136 gimplify_and_add (x, &last_list);
11138 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
11139 gimplify_and_add (x, &reduc_list);
11140 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11141 tseq = copy_gimple_seq_and_replace_locals (tseq);
11142 val = rprival_ref;
11143 if (new_vard != new_var)
11144 val = build_fold_addr_expr_loc (clause_loc, val);
11145 SET_DECL_VALUE_EXPR (new_vard, val);
11146 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11147 SET_DECL_VALUE_EXPR (placeholder, var2);
11148 lower_omp (&tseq, ctx);
11149 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11150 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11151 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11152 if (y)
11153 SET_DECL_VALUE_EXPR (new_vard, y);
11154 else
11156 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11157 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11159 gimple_seq_add_seq (&reduc_list, tseq);
11160 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
11161 gimplify_and_add (x, &reduc_list);
11163 x = lang_hooks.decls.omp_clause_dtor (c, var2);
11164 if (x)
11165 gimplify_and_add (x, dlist);
11167 else
11169 x = build_outer_var_ref (var, ctx);
11170 gimplify_assign (unshare_expr (var4), x, &thr01_list);
11172 x = omp_reduction_init (c, TREE_TYPE (new_var));
11173 gimplify_assign (unshare_expr (var4), unshare_expr (x),
11174 &thrn1_list);
11175 gimplify_assign (unshare_expr (var4), x, &thr02_list);
11177 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
11179 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
11180 if (code == MINUS_EXPR)
11181 code = PLUS_EXPR;
11183 if (is_for_simd)
11184 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
11185 else
11187 if (ctx->scan_exclusive)
11188 gimplify_assign (unshare_expr (rprivb_ref), var2,
11189 &scan1_list);
11190 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
11191 gimplify_assign (var2, x, &scan1_list);
11192 if (ctx->scan_inclusive)
11193 gimplify_assign (unshare_expr (rprivb_ref), var2,
11194 &scan1_list);
11197 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
11198 &mdlist);
11200 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
11201 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
11203 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
11204 &last_list);
11206 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
11207 unshare_expr (rprival_ref));
11208 gimplify_assign (rprival_ref, x, &reduc_list);
11212 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11213 gimple_seq_add_stmt (&scan1_list, g);
11214 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11215 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11216 ? scan_stmt4 : scan_stmt2), g);
11218 tree controlb = create_tmp_var (boolean_type_node);
11219 tree controlp = create_tmp_var (ptr_type_node);
11220 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11221 OMP_CLAUSE_DECL (nc) = controlb;
11222 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11223 *cp1 = nc;
11224 cp1 = &OMP_CLAUSE_CHAIN (nc);
11225 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11226 OMP_CLAUSE_DECL (nc) = controlp;
11227 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11228 *cp1 = nc;
11229 cp1 = &OMP_CLAUSE_CHAIN (nc);
11230 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11231 OMP_CLAUSE_DECL (nc) = controlb;
11232 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11233 *cp2 = nc;
11234 cp2 = &OMP_CLAUSE_CHAIN (nc);
11235 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11236 OMP_CLAUSE_DECL (nc) = controlp;
11237 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11238 *cp2 = nc;
11239 cp2 = &OMP_CLAUSE_CHAIN (nc);
11241 *cp1 = gimple_omp_for_clauses (stmt);
11242 gimple_omp_for_set_clauses (stmt, new_clauses1);
11243 *cp2 = gimple_omp_for_clauses (new_stmt);
11244 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
11246 if (is_for_simd)
11248 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
11249 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
11251 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
11252 GSI_SAME_STMT);
11253 gsi_remove (&input3_gsi, true);
11254 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11255 GSI_SAME_STMT);
11256 gsi_remove (&scan3_gsi, true);
11257 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11258 GSI_SAME_STMT);
11259 gsi_remove (&input4_gsi, true);
11260 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11261 GSI_SAME_STMT);
11262 gsi_remove (&scan4_gsi, true);
11264 else
11266 gimple_omp_set_body (scan_stmt1, scan1_list);
11267 gimple_omp_set_body (input_stmt2, input2_list);
11270 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11271 GSI_SAME_STMT);
11272 gsi_remove (&input1_gsi, true);
11273 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11274 GSI_SAME_STMT);
11275 gsi_remove (&scan1_gsi, true);
11276 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11277 GSI_SAME_STMT);
11278 gsi_remove (&input2_gsi, true);
11279 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11280 GSI_SAME_STMT);
11281 gsi_remove (&scan2_gsi, true);
11283 gimple_seq_add_seq (body_p, clist);
11285 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11286 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11287 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11288 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11289 gimple_seq_add_stmt (body_p, g);
11290 g = gimple_build_label (lab1);
11291 gimple_seq_add_stmt (body_p, g);
11292 gimple_seq_add_seq (body_p, thr01_list);
11293 g = gimple_build_goto (lab3);
11294 gimple_seq_add_stmt (body_p, g);
11295 g = gimple_build_label (lab2);
11296 gimple_seq_add_stmt (body_p, g);
11297 gimple_seq_add_seq (body_p, thrn1_list);
11298 g = gimple_build_label (lab3);
11299 gimple_seq_add_stmt (body_p, g);
11301 g = gimple_build_assign (ivar, size_zero_node);
11302 gimple_seq_add_stmt (body_p, g);
11304 gimple_seq_add_stmt (body_p, stmt);
11305 gimple_seq_add_seq (body_p, body);
11306 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11307 fd->loop.v));
11309 g = gimple_build_omp_return (true);
11310 gimple_seq_add_stmt (body_p, g);
11311 gimple_seq_add_seq (body_p, mdlist);
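/* What follows builds, in effect, a work-efficient parallel prefix
   sum over the per-thread partial results in rpriva[]: K doubles
   each round until 2*K exceeds the thread count (the up-sweep), then
   halves back down (the down-sweep, with DOWN set to all-ones).  In
   each round the thread for which L = (thread_num + 1) * 2 * K
   + (K & DOWN) - 1 is a valid index combines rpriva[L - K] into
   rpriva[L], with barriers separating the rounds; the MUL_OVERFLOW
   call merely guards the computation of L against unsigned
   wrap-around.  */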
11313 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11314 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11315 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11316 gimple_seq_add_stmt (body_p, g);
11317 g = gimple_build_label (lab1);
11318 gimple_seq_add_stmt (body_p, g);
11320 g = omp_build_barrier (NULL);
11321 gimple_seq_add_stmt (body_p, g);
11323 tree down = create_tmp_var (unsigned_type_node);
11324 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11325 gimple_seq_add_stmt (body_p, g);
11327 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11328 gimple_seq_add_stmt (body_p, g);
11330 tree num_threadsu = create_tmp_var (unsigned_type_node);
11331 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11332 gimple_seq_add_stmt (body_p, g);
11334 tree thread_numu = create_tmp_var (unsigned_type_node);
11335 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11336 gimple_seq_add_stmt (body_p, g);
11338 tree thread_nump1 = create_tmp_var (unsigned_type_node);
11339 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11340 build_int_cst (unsigned_type_node, 1));
11341 gimple_seq_add_stmt (body_p, g);
11343 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11344 g = gimple_build_label (lab3);
11345 gimple_seq_add_stmt (body_p, g);
11347 tree twok = create_tmp_var (unsigned_type_node);
11348 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11349 gimple_seq_add_stmt (body_p, g);
11351 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11352 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11353 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11354 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11355 gimple_seq_add_stmt (body_p, g);
11356 g = gimple_build_label (lab4);
11357 gimple_seq_add_stmt (body_p, g);
11358 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11359 gimple_seq_add_stmt (body_p, g);
11360 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11361 gimple_seq_add_stmt (body_p, g);
11363 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11364 gimple_seq_add_stmt (body_p, g);
11365 g = gimple_build_label (lab6);
11366 gimple_seq_add_stmt (body_p, g);
11368 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11369 gimple_seq_add_stmt (body_p, g);
11371 g = gimple_build_label (lab5);
11372 gimple_seq_add_stmt (body_p, g);
11374 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11375 gimple_seq_add_stmt (body_p, g);
11377 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11378 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11379 gimple_call_set_lhs (g, cplx);
11380 gimple_seq_add_stmt (body_p, g);
11381 tree mul = create_tmp_var (unsigned_type_node);
11382 g = gimple_build_assign (mul, REALPART_EXPR,
11383 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11384 gimple_seq_add_stmt (body_p, g);
11385 tree ovf = create_tmp_var (unsigned_type_node);
11386 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11387 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11388 gimple_seq_add_stmt (body_p, g);
11390 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11391 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11392 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11393 lab7, lab8);
11394 gimple_seq_add_stmt (body_p, g);
11395 g = gimple_build_label (lab7);
11396 gimple_seq_add_stmt (body_p, g);
11398 tree andv = create_tmp_var (unsigned_type_node);
11399 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11400 gimple_seq_add_stmt (body_p, g);
11401 tree andvm1 = create_tmp_var (unsigned_type_node);
11402 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11403 build_minus_one_cst (unsigned_type_node));
11404 gimple_seq_add_stmt (body_p, g);
11406 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11407 gimple_seq_add_stmt (body_p, g);
11409 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11410 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11411 gimple_seq_add_stmt (body_p, g);
11412 g = gimple_build_label (lab9);
11413 gimple_seq_add_stmt (body_p, g);
11414 gimple_seq_add_seq (body_p, reduc_list);
11415 g = gimple_build_label (lab8);
11416 gimple_seq_add_stmt (body_p, g);
11418 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11419 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11420 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11421 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11422 lab10, lab11);
11423 gimple_seq_add_stmt (body_p, g);
11424 g = gimple_build_label (lab10);
11425 gimple_seq_add_stmt (body_p, g);
11426 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11427 gimple_seq_add_stmt (body_p, g);
11428 g = gimple_build_goto (lab12);
11429 gimple_seq_add_stmt (body_p, g);
11430 g = gimple_build_label (lab11);
11431 gimple_seq_add_stmt (body_p, g);
11432 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11433 gimple_seq_add_stmt (body_p, g);
11434 g = gimple_build_label (lab12);
11435 gimple_seq_add_stmt (body_p, g);
11437 g = omp_build_barrier (NULL);
11438 gimple_seq_add_stmt (body_p, g);
11440 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11441 lab3, lab2);
11442 gimple_seq_add_stmt (body_p, g);
11444 g = gimple_build_label (lab2);
11445 gimple_seq_add_stmt (body_p, g);
11447 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11448 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11449 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11450 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11451 gimple_seq_add_stmt (body_p, g);
11452 g = gimple_build_label (lab1);
11453 gimple_seq_add_stmt (body_p, g);
11454 gimple_seq_add_seq (body_p, thr02_list);
11455 g = gimple_build_goto (lab3);
11456 gimple_seq_add_stmt (body_p, g);
11457 g = gimple_build_label (lab2);
11458 gimple_seq_add_stmt (body_p, g);
11459 gimple_seq_add_seq (body_p, thrn2_list);
11460 g = gimple_build_label (lab3);
11461 gimple_seq_add_stmt (body_p, g);
11463 g = gimple_build_assign (ivar, size_zero_node);
11464 gimple_seq_add_stmt (body_p, g);
11465 gimple_seq_add_stmt (body_p, new_stmt);
11466 gimple_seq_add_seq (body_p, new_body);
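/* Writing the final scan results back to the original list items
   (LAST_LIST) is guarded so that only the highest-numbered thread
   performs it; the pre-existing DLIST then runs on every thread.  */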
11468 gimple_seq new_dlist = NULL;
11469 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11470 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11471 tree num_threadsm1 = create_tmp_var (integer_type_node);
11472 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11473 integer_minus_one_node);
11474 gimple_seq_add_stmt (&new_dlist, g);
11475 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11476 gimple_seq_add_stmt (&new_dlist, g);
11477 g = gimple_build_label (lab1);
11478 gimple_seq_add_stmt (&new_dlist, g);
11479 gimple_seq_add_seq (&new_dlist, last_list);
11480 g = gimple_build_label (lab2);
11481 gimple_seq_add_stmt (&new_dlist, g);
11482 gimple_seq_add_seq (&new_dlist, *dlist);
11483 *dlist = new_dlist;
11486 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11487 the addresses of variables to be made private at the surrounding
11488 parallelism level. Such functions appear in the gimple code stream in two
11489 forms, e.g. for a partitioned loop:
11491 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11492 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11493 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11494 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11496 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11497 not as part of a HEAD_MARK sequence:
11499 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11501 For such stand-alone appearances, the 3rd argument is always 0, denoting
11502 gang partitioning. */
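/* For instance (illustrative only), for something like

     #pragma acc parallel loop gang private(w)

   the address of the gang-private copy of 'w' would be listed in the
   OACC_PRIVATE call built here.  */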
11504 static gcall *
11505 lower_oacc_private_marker (omp_context *ctx)
11507 if (ctx->oacc_privatization_candidates.length () == 0)
11508 return NULL;
11510 auto_vec<tree, 5> args;
11512 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11513 args.quick_push (integer_zero_node);
11514 args.quick_push (integer_minus_one_node);
11516 int i;
11517 tree decl;
11518 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11520 for (omp_context *thisctx = ctx; thisctx; thisctx = thisctx->outer)
11522 tree inner_decl = maybe_lookup_decl (decl, thisctx);
11523 if (inner_decl)
11525 decl = inner_decl;
11526 break;
11529 gcc_checking_assert (decl);
11531 tree addr = build_fold_addr_expr (decl);
11532 args.safe_push (addr);
11535 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
11538 /* Lower code for an OMP loop directive. */
11540 static void
11541 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11543 tree *rhs_p, block;
11544 struct omp_for_data fd, *fdp = NULL;
11545 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
11546 gbind *new_stmt;
11547 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
11548 gimple_seq cnt_list = NULL, clist = NULL;
11549 gimple_seq oacc_head = NULL, oacc_tail = NULL;
11550 size_t i;
11552 push_gimplify_context ();
11554 if (is_gimple_omp_oacc (ctx->stmt))
11555 oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));
11557 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
11559 block = make_node (BLOCK);
11560 new_stmt = gimple_build_bind (NULL, NULL, block);
11561 /* Replace at gsi right away, so that 'stmt' is no longer a member
11562 of any sequence, as we're going to add it to a different
11563 one below. */
11564 gsi_replace (gsi_p, new_stmt, true);
11566 /* Move declarations of temporaries in the loop body before we make
11567 it go away. */
11568 omp_for_body = gimple_omp_body (stmt);
11569 if (!gimple_seq_empty_p (omp_for_body)
11570 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
11572 gbind *inner_bind
11573 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
11574 tree vars = gimple_bind_vars (inner_bind);
11575 if (is_gimple_omp_oacc (ctx->stmt))
11576 oacc_privatization_scan_decl_chain (ctx, vars);
11577 gimple_bind_append_vars (new_stmt, vars);
11578 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11579 keep them on the inner_bind and its block. */
11580 gimple_bind_set_vars (inner_bind, NULL_TREE);
11581 if (gimple_bind_block (inner_bind))
11582 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
11585 if (gimple_omp_for_combined_into_p (stmt))
11587 omp_extract_for_data (stmt, &fd, NULL);
11588 fdp = &fd;
11590 /* We need two temporaries with fd.loop.v type (istart/iend)
11591 and then (fd.collapse - 1) temporaries with the same
11592 type for count2 ... countN-1 vars if not constant. */
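/* E.g. a combined construct with collapse(3) and a non-constant end
   value needs 2 + (3 - 1) = 4 such temporaries.  */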
11593 size_t count = 2;
11594 tree type = fd.iter_type;
11595 if (fd.collapse > 1
11596 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11597 count += fd.collapse - 1;
11598 size_t count2 = 0;
11599 tree type2 = NULL_TREE;
11600 bool taskreg_for
11601 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
11602 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
11603 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
11604 tree simtc = NULL;
11605 tree clauses = *pc;
11606 if (fd.collapse > 1
11607 && fd.non_rect
11608 && fd.last_nonrect == fd.first_nonrect + 1
11609 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11610 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
11611 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
11613 v = gimple_omp_for_index (stmt, fd.first_nonrect);
11614 type2 = TREE_TYPE (v);
11615 count++;
11616 count2 = 3;
11618 if (taskreg_for)
11619 outerc
11620 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
11621 OMP_CLAUSE__LOOPTEMP_);
11622 if (ctx->simt_stmt)
11623 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
11624 OMP_CLAUSE__LOOPTEMP_);
11625 for (i = 0; i < count + count2; i++)
11627 tree temp;
11628 if (taskreg_for)
11630 gcc_assert (outerc);
11631 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
11632 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
11633 OMP_CLAUSE__LOOPTEMP_);
11635 else
11637 /* If there are two adjacent SIMD stmts, one with a _simt_
11638 clause and another without, make sure they have the same
11639 decls in their _looptemp_ clauses, because the outer stmt
11640 they are combined into will look up just one inner_stmt. */
11641 if (ctx->simt_stmt)
11642 temp = OMP_CLAUSE_DECL (simtc);
11643 else
11644 temp = create_tmp_var (i >= count ? type2 : type);
11645 insert_decl_map (&ctx->outer->cb, temp, temp);
11647 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
11648 OMP_CLAUSE_DECL (*pc) = temp;
11649 pc = &OMP_CLAUSE_CHAIN (*pc);
11650 if (ctx->simt_stmt)
11651 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
11652 OMP_CLAUSE__LOOPTEMP_);
11654 *pc = clauses;
11657 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11658 dlist = NULL;
11659 body = NULL;
11660 tree rclauses
11661 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
11662 OMP_CLAUSE_REDUCTION);
11663 tree rtmp = NULL_TREE;
11664 if (rclauses)
11666 tree type = build_pointer_type (pointer_sized_int_node);
11667 tree temp = create_tmp_var (type);
11668 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
11669 OMP_CLAUSE_DECL (c) = temp;
11670 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
11671 gimple_omp_for_set_clauses (stmt, c);
11672 lower_omp_task_reductions (ctx, OMP_FOR,
11673 gimple_omp_for_clauses (stmt),
11674 &tred_ilist, &tred_dlist);
11675 rclauses = c;
11676 rtmp = make_ssa_name (type);
11677 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
11680 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
11681 ctx);
11683 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
11684 fdp);
11685 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
11686 gimple_omp_for_pre_body (stmt));
11688 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11690 gcall *private_marker = NULL;
11691 if (is_gimple_omp_oacc (ctx->stmt)
11692 && !gimple_seq_empty_p (omp_for_body))
11693 private_marker = lower_oacc_private_marker (ctx);
11695 /* Lower the header expressions. At this point, we can assume that
11696 the header is of the form:
11698 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11700 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11701 using the .omp_data_s mapping, if needed. */
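/* For example (illustrative), a non-invariant bound such as 'n * 2'
   is evaluated into a formal temporary appended to CNT_LIST, and
   CNT_LIST is emitted ahead of the lowered construct below.  */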
11702 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
11704 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
11705 if (TREE_CODE (*rhs_p) == TREE_VEC)
11707 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11708 TREE_VEC_ELT (*rhs_p, 1)
11709 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11710 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11711 TREE_VEC_ELT (*rhs_p, 2)
11712 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11714 else if (!is_gimple_min_invariant (*rhs_p))
11715 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11716 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11717 recompute_tree_invariant_for_addr_expr (*rhs_p);
11719 rhs_p = gimple_omp_for_final_ptr (stmt, i);
11720 if (TREE_CODE (*rhs_p) == TREE_VEC)
11722 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11723 TREE_VEC_ELT (*rhs_p, 1)
11724 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11725 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11726 TREE_VEC_ELT (*rhs_p, 2)
11727 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11729 else if (!is_gimple_min_invariant (*rhs_p))
11730 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11731 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11732 recompute_tree_invariant_for_addr_expr (*rhs_p);
11734 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
11735 if (!is_gimple_min_invariant (*rhs_p))
11736 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11738 if (rclauses)
11739 gimple_seq_add_seq (&tred_ilist, cnt_list);
11740 else
11741 gimple_seq_add_seq (&body, cnt_list);
11743 /* Once lowered, extract the bounds and clauses. */
11744 omp_extract_for_data (stmt, &fd, NULL);
11746 if (is_gimple_omp_oacc (ctx->stmt)
11747 && !ctx_in_oacc_kernels_region (ctx))
11748 lower_oacc_head_tail (gimple_location (stmt),
11749 gimple_omp_for_clauses (stmt), private_marker,
11750 &oacc_head, &oacc_tail, ctx);
11752 /* Add OpenACC partitioning and reduction markers just before the loop. */
11753 if (oacc_head)
11754 gimple_seq_add_seq (&body, oacc_head);
11756 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
11758 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11759 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11760 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11761 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11763 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
11764 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
11765 OMP_CLAUSE_LINEAR_STEP (c)
11766 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
11767 ctx);
11770 if ((ctx->scan_inclusive || ctx->scan_exclusive)
11771 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11772 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
11773 else
11775 gimple_seq_add_stmt (&body, stmt);
11776 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
11779 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
11780 fd.loop.v));
11782 /* After the loop, add exit clauses. */
11783 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
11785 if (clist)
11787 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
11788 gcall *g = gimple_build_call (fndecl, 0);
11789 gimple_seq_add_stmt (&body, g);
11790 gimple_seq_add_seq (&body, clist);
11791 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
11792 g = gimple_build_call (fndecl, 0);
11793 gimple_seq_add_stmt (&body, g);
11796 if (ctx->cancellable)
11797 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
11799 gimple_seq_add_seq (&body, dlist);
11801 if (rclauses)
11803 gimple_seq_add_seq (&tred_ilist, body);
11804 body = tred_ilist;
11807 body = maybe_catch_exception (body);
11809 /* Region exit marker goes at the end of the loop body. */
11810 gimple *g = gimple_build_omp_return (fd.have_nowait);
11811 gimple_seq_add_stmt (&body, g);
11813 gimple_seq_add_seq (&body, tred_dlist);
11815 maybe_add_implicit_barrier_cancel (ctx, g, &body);
11817 if (rclauses)
11818 OMP_CLAUSE_DECL (rclauses) = rtmp;
11820 /* Add OpenACC joining and reduction markers just after the loop. */
11821 if (oacc_tail)
11822 gimple_seq_add_seq (&body, oacc_tail);
11824 pop_gimplify_context (new_stmt);
11826 gimple_bind_append_vars (new_stmt, ctx->block_vars);
11827 maybe_remove_omp_member_access_dummy_vars (new_stmt);
11828 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
11829 if (BLOCK_VARS (block))
11830 TREE_USED (block) = 1;
11832 gimple_bind_set_body (new_stmt, body);
11833 gimple_omp_set_body (stmt, NULL);
11834 gimple_omp_for_set_pre_body (stmt, NULL);
11837 /* Callback for walk_stmts. Check if the current statement only contains
11838 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
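/* The result is communicated through WI->INFO: it starts as 0, goes
   to 1 at the first workshare statement seen and to -1 as soon as a
   second one or any other non-debug statement is encountered; the
   caller treats only a final value of 1 as a combined parallel.  */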
11840 static tree
11841 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11842 bool *handled_ops_p,
11843 struct walk_stmt_info *wi)
11845 int *info = (int *) wi->info;
11846 gimple *stmt = gsi_stmt (*gsi_p);
11848 *handled_ops_p = true;
11849 switch (gimple_code (stmt))
11851 WALK_SUBSTMTS;
11853 case GIMPLE_DEBUG:
11854 break;
11855 case GIMPLE_OMP_FOR:
11856 case GIMPLE_OMP_SECTIONS:
11857 *info = *info == 0 ? 1 : -1;
11858 break;
11859 default:
11860 *info = -1;
11861 break;
11863 return NULL;
11866 struct omp_taskcopy_context
11868 /* This field must be at the beginning, as we do "inheritance": Some
11869 callback functions for tree-inline.c (e.g., omp_copy_decl)
11870 receive a copy_body_data pointer that is up-casted to an
11871 omp_context pointer. */
11872 copy_body_data cb;
11873 omp_context *ctx;
11876 static tree
11877 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11879 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11881 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11882 return create_tmp_var (TREE_TYPE (var));
11884 return var;
11887 static tree
11888 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11890 tree name, new_fields = NULL, type, f;
11892 type = lang_hooks.types.make_type (RECORD_TYPE);
11893 name = DECL_NAME (TYPE_NAME (orig_type));
11894 name = build_decl (gimple_location (tcctx->ctx->stmt),
11895 TYPE_DECL, name, type);
11896 TYPE_NAME (type) = name;
11898 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11900 tree new_f = copy_node (f);
11901 DECL_CONTEXT (new_f) = type;
11902 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11903 TREE_CHAIN (new_f) = new_fields;
11904 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11905 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11906 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11907 &tcctx->cb, NULL);
11908 new_fields = new_f;
11909 tcctx->cb.decl_map->put (f, new_f);
11911 TYPE_FIELDS (type) = nreverse (new_fields);
11912 layout_type (type);
11913 return type;
11916 /* Create task copyfn. */
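/* That is, roughly, the function the runtime calls to copy the data
   of an OpenMP task: it reads from the sender record (the second
   argument) and fills in the task's own record (the first argument),
   invoking copy constructors where the language requires them.  */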
11918 static void
11919 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
11921 struct function *child_cfun;
11922 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
11923 tree record_type, srecord_type, bind, list;
11924 bool record_needs_remap = false, srecord_needs_remap = false;
11925 splay_tree_node n;
11926 struct omp_taskcopy_context tcctx;
11927 location_t loc = gimple_location (task_stmt);
11928 size_t looptempno = 0;
11930 child_fn = gimple_omp_task_copy_fn (task_stmt);
11931 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
11932 gcc_assert (child_cfun->cfg == NULL);
11933 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
11935 /* Reset DECL_CONTEXT on function arguments. */
11936 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
11937 DECL_CONTEXT (t) = child_fn;
11939 /* Populate the function. */
11940 push_gimplify_context ();
11941 push_cfun (child_cfun);
11943 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
11944 TREE_SIDE_EFFECTS (bind) = 1;
11945 list = NULL;
11946 DECL_SAVED_TREE (child_fn) = bind;
11947 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
11949 /* Remap src and dst argument types if needed. */
11950 record_type = ctx->record_type;
11951 srecord_type = ctx->srecord_type;
11952 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
11953 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11955 record_needs_remap = true;
11956 break;
11958 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
11959 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11961 srecord_needs_remap = true;
11962 break;
11965 if (record_needs_remap || srecord_needs_remap)
11967 memset (&tcctx, '\0', sizeof (tcctx));
11968 tcctx.cb.src_fn = ctx->cb.src_fn;
11969 tcctx.cb.dst_fn = child_fn;
11970 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
11971 gcc_checking_assert (tcctx.cb.src_node);
11972 tcctx.cb.dst_node = tcctx.cb.src_node;
11973 tcctx.cb.src_cfun = ctx->cb.src_cfun;
11974 tcctx.cb.copy_decl = task_copyfn_copy_decl;
11975 tcctx.cb.eh_lp_nr = 0;
11976 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
11977 tcctx.cb.decl_map = new hash_map<tree, tree>;
11978 tcctx.ctx = ctx;
11980 if (record_needs_remap)
11981 record_type = task_copyfn_remap_type (&tcctx, record_type);
11982 if (srecord_needs_remap)
11983 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
11985 else
11986 tcctx.cb.decl_map = NULL;
11988 arg = DECL_ARGUMENTS (child_fn);
11989 TREE_TYPE (arg) = build_pointer_type (record_type);
11990 sarg = DECL_CHAIN (arg);
11991 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
11993 /* First pass: initialize temporaries used in record_type and srecord_type
11994 sizes and field offsets. */
11995 if (tcctx.cb.decl_map)
11996 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11997 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11999 tree *p;
12001 decl = OMP_CLAUSE_DECL (c);
12002 p = tcctx.cb.decl_map->get (decl);
12003 if (p == NULL)
12004 continue;
12005 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12006 sf = (tree) n->value;
12007 sf = *tcctx.cb.decl_map->get (sf);
12008 src = build_simple_mem_ref_loc (loc, sarg);
12009 src = omp_build_component_ref (src, sf);
12010 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
12011 append_to_statement_list (t, &list);
12014 /* Second pass: copy shared var pointers and copy-construct non-VLA
12015 firstprivate vars. */
12016 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12017 switch (OMP_CLAUSE_CODE (c))
12019 splay_tree_key key;
12020 case OMP_CLAUSE_SHARED:
12021 decl = OMP_CLAUSE_DECL (c);
12022 key = (splay_tree_key) decl;
12023 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
12024 key = (splay_tree_key) &DECL_UID (decl);
12025 n = splay_tree_lookup (ctx->field_map, key);
12026 if (n == NULL)
12027 break;
12028 f = (tree) n->value;
12029 if (tcctx.cb.decl_map)
12030 f = *tcctx.cb.decl_map->get (f);
12031 n = splay_tree_lookup (ctx->sfield_map, key);
12032 sf = (tree) n->value;
12033 if (tcctx.cb.decl_map)
12034 sf = *tcctx.cb.decl_map->get (sf);
12035 src = build_simple_mem_ref_loc (loc, sarg);
12036 src = omp_build_component_ref (src, sf);
12037 dst = build_simple_mem_ref_loc (loc, arg);
12038 dst = omp_build_component_ref (dst, f);
12039 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12040 append_to_statement_list (t, &list);
12041 break;
12042 case OMP_CLAUSE_REDUCTION:
12043 case OMP_CLAUSE_IN_REDUCTION:
12044 decl = OMP_CLAUSE_DECL (c);
12045 if (TREE_CODE (decl) == MEM_REF)
12047 decl = TREE_OPERAND (decl, 0);
12048 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12049 decl = TREE_OPERAND (decl, 0);
12050 if (TREE_CODE (decl) == INDIRECT_REF
12051 || TREE_CODE (decl) == ADDR_EXPR)
12052 decl = TREE_OPERAND (decl, 0);
12054 key = (splay_tree_key) decl;
12055 n = splay_tree_lookup (ctx->field_map, key);
12056 if (n == NULL)
12057 break;
12058 f = (tree) n->value;
12059 if (tcctx.cb.decl_map)
12060 f = *tcctx.cb.decl_map->get (f);
12061 n = splay_tree_lookup (ctx->sfield_map, key);
12062 sf = (tree) n->value;
12063 if (tcctx.cb.decl_map)
12064 sf = *tcctx.cb.decl_map->get (sf);
12065 src = build_simple_mem_ref_loc (loc, sarg);
12066 src = omp_build_component_ref (src, sf);
12067 if (decl != OMP_CLAUSE_DECL (c)
12068 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12069 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12070 src = build_simple_mem_ref_loc (loc, src);
12071 dst = build_simple_mem_ref_loc (loc, arg);
12072 dst = omp_build_component_ref (dst, f);
12073 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12074 append_to_statement_list (t, &list);
12075 break;
12076 case OMP_CLAUSE__LOOPTEMP_:
12077 /* Fields for the first two _looptemp_ clauses are initialized by
12078 GOMP_taskloop*; the rest are handled like firstprivate. */
12079 if (looptempno < 2)
12081 looptempno++;
12082 break;
12084 /* FALLTHRU */
12085 case OMP_CLAUSE__REDUCTEMP_:
12086 case OMP_CLAUSE_FIRSTPRIVATE:
12087 decl = OMP_CLAUSE_DECL (c);
12088 if (is_variable_sized (decl))
12089 break;
12090 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12091 if (n == NULL)
12092 break;
12093 f = (tree) n->value;
12094 if (tcctx.cb.decl_map)
12095 f = *tcctx.cb.decl_map->get (f);
12096 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12097 if (n != NULL)
12099 sf = (tree) n->value;
12100 if (tcctx.cb.decl_map)
12101 sf = *tcctx.cb.decl_map->get (sf);
12102 src = build_simple_mem_ref_loc (loc, sarg);
12103 src = omp_build_component_ref (src, sf);
12104 if (use_pointer_for_field (decl, NULL)
12105 || omp_privatize_by_reference (decl))
12106 src = build_simple_mem_ref_loc (loc, src);
12108 else
12109 src = decl;
12110 dst = build_simple_mem_ref_loc (loc, arg);
12111 dst = omp_build_component_ref (dst, f);
12112 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
12113 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12114 else
12116 if (ctx->allocate_map)
12117 if (tree *allocatorp = ctx->allocate_map->get (decl))
12119 tree allocator = *allocatorp;
12120 HOST_WIDE_INT ialign = 0;
12121 if (TREE_CODE (allocator) == TREE_LIST)
12123 ialign = tree_to_uhwi (TREE_VALUE (allocator));
12124 allocator = TREE_PURPOSE (allocator);
12126 if (TREE_CODE (allocator) != INTEGER_CST)
12128 n = splay_tree_lookup (ctx->sfield_map,
12129 (splay_tree_key) allocator);
12130 allocator = (tree) n->value;
12131 if (tcctx.cb.decl_map)
12132 allocator = *tcctx.cb.decl_map->get (allocator);
12133 tree a = build_simple_mem_ref_loc (loc, sarg);
12134 allocator = omp_build_component_ref (a, allocator);
12136 allocator = fold_convert (pointer_sized_int_node, allocator);
12137 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
12138 tree align = build_int_cst (size_type_node,
12139 MAX (ialign,
12140 DECL_ALIGN_UNIT (decl)));
12141 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
12142 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
12143 allocator);
12144 ptr = fold_convert (TREE_TYPE (dst), ptr);
12145 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
12146 append_to_statement_list (t, &list);
12147 dst = build_simple_mem_ref_loc (loc, dst);
12149 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12151 append_to_statement_list (t, &list);
12152 break;
12153 case OMP_CLAUSE_PRIVATE:
12154 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
12155 break;
12156 decl = OMP_CLAUSE_DECL (c);
12157 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12158 f = (tree) n->value;
12159 if (tcctx.cb.decl_map)
12160 f = *tcctx.cb.decl_map->get (f);
12161 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12162 if (n != NULL)
12164 sf = (tree) n->value;
12165 if (tcctx.cb.decl_map)
12166 sf = *tcctx.cb.decl_map->get (sf);
12167 src = build_simple_mem_ref_loc (loc, sarg);
12168 src = omp_build_component_ref (src, sf);
12169 if (use_pointer_for_field (decl, NULL))
12170 src = build_simple_mem_ref_loc (loc, src);
12172 else
12173 src = decl;
12174 dst = build_simple_mem_ref_loc (loc, arg);
12175 dst = omp_build_component_ref (dst, f);
12176 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12177 append_to_statement_list (t, &list);
12178 break;
12179 default:
12180 break;
12183 /* Last pass: handle VLA firstprivates. */
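/* Each VLA firstprivate occupies two fields: its data is
   copy-constructed into the destination record, and the VLA's pointer
   field is then set to the address of that copy.  */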
12184 if (tcctx.cb.decl_map)
12185 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12186 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12188 tree ind, ptr, df;
12190 decl = OMP_CLAUSE_DECL (c);
12191 if (!is_variable_sized (decl))
12192 continue;
12193 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12194 if (n == NULL)
12195 continue;
12196 f = (tree) n->value;
12197 f = *tcctx.cb.decl_map->get (f);
12198 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
12199 ind = DECL_VALUE_EXPR (decl);
12200 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
12201 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
12202 n = splay_tree_lookup (ctx->sfield_map,
12203 (splay_tree_key) TREE_OPERAND (ind, 0));
12204 sf = (tree) n->value;
12205 sf = *tcctx.cb.decl_map->get (sf);
12206 src = build_simple_mem_ref_loc (loc, sarg);
12207 src = omp_build_component_ref (src, sf);
12208 src = build_simple_mem_ref_loc (loc, src);
12209 dst = build_simple_mem_ref_loc (loc, arg);
12210 dst = omp_build_component_ref (dst, f);
12211 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12212 append_to_statement_list (t, &list);
12213 n = splay_tree_lookup (ctx->field_map,
12214 (splay_tree_key) TREE_OPERAND (ind, 0));
12215 df = (tree) n->value;
12216 df = *tcctx.cb.decl_map->get (df);
12217 ptr = build_simple_mem_ref_loc (loc, arg);
12218 ptr = omp_build_component_ref (ptr, df);
12219 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
12220 build_fold_addr_expr_loc (loc, dst));
12221 append_to_statement_list (t, &list);
12224 t = build1 (RETURN_EXPR, void_type_node, NULL);
12225 append_to_statement_list (t, &list);
12227 if (tcctx.cb.decl_map)
12228 delete tcctx.cb.decl_map;
12229 pop_gimplify_context (NULL);
12230 BIND_EXPR_BODY (bind) = list;
12231 pop_cfun ();
12234 static void
12235 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
12237 tree c, clauses;
12238 gimple *g;
12239 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
12241 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
12242 gcc_assert (clauses);
12243 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12244 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
12245 switch (OMP_CLAUSE_DEPEND_KIND (c))
12247 case OMP_CLAUSE_DEPEND_LAST:
12248 /* Lowering already done at gimplification. */
12249 return;
12250 case OMP_CLAUSE_DEPEND_IN:
12251 cnt[2]++;
12252 break;
12253 case OMP_CLAUSE_DEPEND_OUT:
12254 case OMP_CLAUSE_DEPEND_INOUT:
12255 cnt[0]++;
12256 break;
12257 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12258 cnt[1]++;
12259 break;
12260 case OMP_CLAUSE_DEPEND_DEPOBJ:
12261 cnt[3]++;
12262 break;
12263 case OMP_CLAUSE_DEPEND_SOURCE:
12264 case OMP_CLAUSE_DEPEND_SINK:
12265 /* FALLTHRU */
12266 default:
12267 gcc_unreachable ();
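/* Lay out the array the runtime expects.  In the legacy format the
   header is { total, cnt[out/inout] }; when mutexinoutset or depobj
   entries are present, the five-element header { 0, total,
   cnt[out/inout], cnt[mutexinoutset], cnt[in] } is used instead.
   The addresses follow, grouped as out/inout, mutexinoutset, in,
   depobj.  */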
12269 if (cnt[1] || cnt[3])
12270 idx = 5;
12271 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
12272 tree type = build_array_type_nelts (ptr_type_node, total + idx);
12273 tree array = create_tmp_var (type);
12274 TREE_ADDRESSABLE (array) = 1;
12275 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12276 NULL_TREE);
12277 if (idx == 5)
12279 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12280 gimple_seq_add_stmt (iseq, g);
12281 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12282 NULL_TREE);
12284 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12285 gimple_seq_add_stmt (iseq, g);
12286 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12288 r = build4 (ARRAY_REF, ptr_type_node, array,
12289 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12290 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12291 gimple_seq_add_stmt (iseq, g);
12293 for (i = 0; i < 4; i++)
12295 if (cnt[i] == 0)
12296 continue;
12297 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12298 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12299 continue;
12300 else
12302 switch (OMP_CLAUSE_DEPEND_KIND (c))
12304 case OMP_CLAUSE_DEPEND_IN:
12305 if (i != 2)
12306 continue;
12307 break;
12308 case OMP_CLAUSE_DEPEND_OUT:
12309 case OMP_CLAUSE_DEPEND_INOUT:
12310 if (i != 0)
12311 continue;
12312 break;
12313 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12314 if (i != 1)
12315 continue;
12316 break;
12317 case OMP_CLAUSE_DEPEND_DEPOBJ:
12318 if (i != 3)
12319 continue;
12320 break;
12321 default:
12322 gcc_unreachable ();
12324 tree t = OMP_CLAUSE_DECL (c);
12325 t = fold_convert (ptr_type_node, t);
12326 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12327 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12328 NULL_TREE, NULL_TREE);
12329 g = gimple_build_assign (r, t);
12330 gimple_seq_add_stmt (iseq, g);
12333 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12334 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12335 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12336 OMP_CLAUSE_CHAIN (c) = *pclauses;
12337 *pclauses = c;
12338 tree clobber = build_clobber (type);
12339 g = gimple_build_assign (array, clobber);
12340 gimple_seq_add_stmt (oseq, g);
12343 /* Lower the OpenMP parallel or task directive in the current statement
12344 in GSI_P. CTX holds context information for the directive. */
12346 static void
12347 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12349 tree clauses;
12350 tree child_fn, t;
12351 gimple *stmt = gsi_stmt (*gsi_p);
12352 gbind *par_bind, *bind, *dep_bind = NULL;
12353 gimple_seq par_body;
12354 location_t loc = gimple_location (stmt);
12356 clauses = gimple_omp_taskreg_clauses (stmt);
12357 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12358 && gimple_omp_task_taskwait_p (stmt))
12360 par_bind = NULL;
12361 par_body = NULL;
12363 else
12365 par_bind
12366 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
12367 par_body = gimple_bind_body (par_bind);
12369 child_fn = ctx->cb.dst_fn;
12370 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
12371 && !gimple_omp_parallel_combined_p (stmt))
12373 struct walk_stmt_info wi;
12374 int ws_num = 0;
12376 memset (&wi, 0, sizeof (wi));
12377 wi.info = &ws_num;
12378 wi.val_only = true;
12379 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12380 if (ws_num == 1)
12381 gimple_omp_parallel_set_combined_p (stmt, true);
12383 gimple_seq dep_ilist = NULL;
12384 gimple_seq dep_olist = NULL;
12385 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12386 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12388 push_gimplify_context ();
12389 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12390 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
12391 &dep_ilist, &dep_olist);
12394 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12395 && gimple_omp_task_taskwait_p (stmt))
12397 if (dep_bind)
12399 gsi_replace (gsi_p, dep_bind, true);
12400 gimple_bind_add_seq (dep_bind, dep_ilist);
12401 gimple_bind_add_stmt (dep_bind, stmt);
12402 gimple_bind_add_seq (dep_bind, dep_olist);
12403 pop_gimplify_context (dep_bind);
12405 return;
12408 if (ctx->srecord_type)
12409 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
12411 gimple_seq tskred_ilist = NULL;
12412 gimple_seq tskred_olist = NULL;
12413 if ((is_task_ctx (ctx)
12414 && gimple_omp_task_taskloop_p (ctx->stmt)
12415 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
12416 OMP_CLAUSE_REDUCTION))
12417 || (is_parallel_ctx (ctx)
12418 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
12419 OMP_CLAUSE__REDUCTEMP_)))
12421 if (dep_bind == NULL)
12423 push_gimplify_context ();
12424 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12426 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
12427 : OMP_PARALLEL,
12428 gimple_omp_taskreg_clauses (ctx->stmt),
12429 &tskred_ilist, &tskred_olist);
12432 push_gimplify_context ();
12434 gimple_seq par_olist = NULL;
12435 gimple_seq par_ilist = NULL;
12436 gimple_seq par_rlist = NULL;
12437 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
12438 lower_omp (&par_body, ctx);
12439 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
12440 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
12442 /* Declare all the variables created by mapping and the variables
12443 declared in the scope of the parallel body. */
12444 record_vars_into (ctx->block_vars, child_fn);
12445 maybe_remove_omp_member_access_dummy_vars (par_bind);
12446 record_vars_into (gimple_bind_vars (par_bind), child_fn);
12448 if (ctx->record_type)
12450 ctx->sender_decl
12451 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12452 : ctx->record_type, ".omp_data_o");
12453 DECL_NAMELESS (ctx->sender_decl) = 1;
12454 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12455 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
12458 gimple_seq olist = NULL;
12459 gimple_seq ilist = NULL;
12460 lower_send_clauses (clauses, &ilist, &olist, ctx);
12461 lower_send_shared_vars (&ilist, &olist, ctx);
12463 if (ctx->record_type)
12465 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12466 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12467 clobber));
12470 /* Once all the expansions are done, sequence all the different
12471 fragments inside gimple_omp_body. */
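/* The resulting order is: receiver setup, copy-in sequence, the
   lowered body, reduction epilogue, optional cancellation label,
   copy-out sequence, and finally the OMP_CONTINUE (for tasks only)
   and OMP_RETURN markers.  */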
12473 gimple_seq new_body = NULL;
12475 if (ctx->record_type)
12477 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12478 /* fixup_child_record_type might have changed receiver_decl's type. */
12479 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12480 gimple_seq_add_stmt (&new_body,
12481 gimple_build_assign (ctx->receiver_decl, t));
12484 gimple_seq_add_seq (&new_body, par_ilist);
12485 gimple_seq_add_seq (&new_body, par_body);
12486 gimple_seq_add_seq (&new_body, par_rlist);
12487 if (ctx->cancellable)
12488 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
12489 gimple_seq_add_seq (&new_body, par_olist);
12490 new_body = maybe_catch_exception (new_body);
12491 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
12492 gimple_seq_add_stmt (&new_body,
12493 gimple_build_omp_continue (integer_zero_node,
12494 integer_zero_node));
12495 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12496 gimple_omp_set_body (stmt, new_body);
12498 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
12499 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12500 else
12501 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
12502 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12503 gimple_bind_add_seq (bind, ilist);
12504 gimple_bind_add_stmt (bind, stmt);
12505 gimple_bind_add_seq (bind, olist);
12507 pop_gimplify_context (NULL);
12509 if (dep_bind)
12511 gimple_bind_add_seq (dep_bind, dep_ilist);
12512 gimple_bind_add_seq (dep_bind, tskred_ilist);
12513 gimple_bind_add_stmt (dep_bind, bind);
12514 gimple_bind_add_seq (dep_bind, tskred_olist);
12515 gimple_bind_add_seq (dep_bind, dep_olist);
12516 pop_gimplify_context (dep_bind);
12520 /* Lower the GIMPLE_OMP_TARGET in the current statement
12521 in GSI_P. CTX holds context information for the directive. */
12523 static void
12524 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12526 tree clauses;
12527 tree child_fn, t, c;
12528 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12529 gbind *tgt_bind, *bind, *dep_bind = NULL;
12530 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12531 location_t loc = gimple_location (stmt);
12532 bool offloaded, data_region;
12533 unsigned int map_cnt = 0;
12534 tree in_reduction_clauses = NULL_TREE;
12536 offloaded = is_gimple_omp_offloaded (stmt);
12537 switch (gimple_omp_target_kind (stmt))
12539 case GF_OMP_TARGET_KIND_REGION:
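/* Gather all OMP_CLAUSE_IN_REDUCTION clauses into
   IN_REDUCTION_CLAUSES and reattach them at the end of the clause
   chain.  */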
12540 tree *p, *q;
12541 q = &in_reduction_clauses;
12542 for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
12543 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12545 *q = *p;
12546 q = &OMP_CLAUSE_CHAIN (*q);
12547 *p = OMP_CLAUSE_CHAIN (*p);
12549 else
12550 p = &OMP_CLAUSE_CHAIN (*p);
12551 *q = NULL_TREE;
12552 *p = in_reduction_clauses;
12553 /* FALLTHRU */
12554 case GF_OMP_TARGET_KIND_UPDATE:
12555 case GF_OMP_TARGET_KIND_ENTER_DATA:
12556 case GF_OMP_TARGET_KIND_EXIT_DATA:
12557 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12558 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12559 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12560 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12561 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12562 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12563 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12564 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12565 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12566 data_region = false;
12567 break;
12568 case GF_OMP_TARGET_KIND_DATA:
12569 case GF_OMP_TARGET_KIND_OACC_DATA:
12570 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12571 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12572 data_region = true;
12573 break;
12574 default:
12575 gcc_unreachable ();
12578 clauses = gimple_omp_target_clauses (stmt);
12580 gimple_seq dep_ilist = NULL;
12581 gimple_seq dep_olist = NULL;
12582 bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
12583 if (has_depend || in_reduction_clauses)
12585 push_gimplify_context ();
12586 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12587 if (has_depend)
12588 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12589 &dep_ilist, &dep_olist);
12590 if (in_reduction_clauses)
12591 lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
12592 ctx, NULL);
12595 tgt_bind = NULL;
12596 tgt_body = NULL;
12597 if (offloaded)
12599 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12600 tgt_body = gimple_bind_body (tgt_bind);
12602 else if (data_region)
12603 tgt_body = gimple_omp_body (stmt);
12604 child_fn = ctx->cb.dst_fn;
12606 push_gimplify_context ();
12607 fplist = NULL;
12609 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12610 switch (OMP_CLAUSE_CODE (c))
12612 tree var, x;
12614 default:
12615 break;
12616 case OMP_CLAUSE_MAP:
12617 #if CHECKING_P
12618 /* First check what we're prepared to handle in the following. */
12619 switch (OMP_CLAUSE_MAP_KIND (c))
12621 case GOMP_MAP_ALLOC:
12622 case GOMP_MAP_TO:
12623 case GOMP_MAP_FROM:
12624 case GOMP_MAP_TOFROM:
12625 case GOMP_MAP_POINTER:
12626 case GOMP_MAP_TO_PSET:
12627 case GOMP_MAP_DELETE:
12628 case GOMP_MAP_RELEASE:
12629 case GOMP_MAP_ALWAYS_TO:
12630 case GOMP_MAP_ALWAYS_FROM:
12631 case GOMP_MAP_ALWAYS_TOFROM:
12632 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12633 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12634 case GOMP_MAP_STRUCT:
12635 case GOMP_MAP_ALWAYS_POINTER:
12636 case GOMP_MAP_ATTACH:
12637 case GOMP_MAP_DETACH:
12638 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
12639 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
12640 break;
12641 case GOMP_MAP_IF_PRESENT:
12642 case GOMP_MAP_FORCE_ALLOC:
12643 case GOMP_MAP_FORCE_TO:
12644 case GOMP_MAP_FORCE_FROM:
12645 case GOMP_MAP_FORCE_TOFROM:
12646 case GOMP_MAP_FORCE_PRESENT:
12647 case GOMP_MAP_FORCE_DEVICEPTR:
12648 case GOMP_MAP_DEVICE_RESIDENT:
12649 case GOMP_MAP_LINK:
12650 case GOMP_MAP_FORCE_DETACH:
12651 gcc_assert (is_gimple_omp_oacc (stmt));
12652 break;
12653 default:
12654 gcc_unreachable ();
12656 #endif
12657 /* FALLTHRU */
12658 case OMP_CLAUSE_TO:
12659 case OMP_CLAUSE_FROM:
12660 oacc_firstprivate:
12661 var = OMP_CLAUSE_DECL (c);
12662 if (!DECL_P (var))
12664 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12665 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12666 && (OMP_CLAUSE_MAP_KIND (c)
12667 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12668 map_cnt++;
12669 continue;
12672 if (DECL_SIZE (var)
12673 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12675 tree var2 = DECL_VALUE_EXPR (var);
12676 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12677 var2 = TREE_OPERAND (var2, 0);
12678 gcc_assert (DECL_P (var2));
12679 var = var2;
12682 if (offloaded
12683 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12684 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12685 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12687 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12689 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12690 && varpool_node::get_create (var)->offloadable)
12691 continue;
12693 tree type = build_pointer_type (TREE_TYPE (var));
12694 tree new_var = lookup_decl (var, ctx);
12695 x = create_tmp_var_raw (type, get_name (new_var));
12696 gimple_add_tmp_var (x);
12697 x = build_simple_mem_ref (x);
12698 SET_DECL_VALUE_EXPR (new_var, x);
12699 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12701 continue;
12704 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12705 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12706 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12707 && is_omp_target (stmt))
12709 gcc_assert (maybe_lookup_field (c, ctx));
12710 map_cnt++;
12711 continue;
12714 if (!maybe_lookup_field (var, ctx))
12715 continue;
12717 /* Don't remap compute constructs' reduction variables, because the
12718 intermediate result must be local to each gang. */
12719 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12720 && is_gimple_omp_oacc (ctx->stmt)
12721 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12723 x = build_receiver_ref (var, true, ctx);
12724 tree new_var = lookup_decl (var, ctx);
12726 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12727 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12728 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12729 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12730 x = build_simple_mem_ref (x);
12731 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12733 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12734 if (omp_privatize_by_reference (new_var)
12735 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12736 || DECL_BY_REFERENCE (var)))
12738 /* Create a local object to hold the instance
12739 value. */
12740 tree type = TREE_TYPE (TREE_TYPE (new_var));
12741 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12742 tree inst = create_tmp_var (type, id);
12743 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12744 x = build_fold_addr_expr (inst);
12746 gimplify_assign (new_var, x, &fplist);
12748 else if (DECL_P (new_var))
12750 SET_DECL_VALUE_EXPR (new_var, x);
12751 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12753 else
12754 gcc_unreachable ();
12756 map_cnt++;
12757 break;
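/* Exposition note: this first pass over the clauses only counts the
   slots needed (MAP_CNT) and, where required, redirects mapped
   variables to their receiver fields via SET_DECL_VALUE_EXPR; the
   sender-side stores are emitted by the second clause walk further
   below.  */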
12759 case OMP_CLAUSE_FIRSTPRIVATE:
12760 gcc_checking_assert (offloaded);
12761 if (is_gimple_omp_oacc (ctx->stmt))
12763 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12764 gcc_checking_assert (!is_oacc_kernels (ctx));
12765 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12766 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12768 goto oacc_firstprivate;
12770 map_cnt++;
12771 var = OMP_CLAUSE_DECL (c);
12772 if (!omp_privatize_by_reference (var)
12773 && !is_gimple_reg_type (TREE_TYPE (var)))
12775 tree new_var = lookup_decl (var, ctx);
12776 if (is_variable_sized (var))
12778 tree pvar = DECL_VALUE_EXPR (var);
12779 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12780 pvar = TREE_OPERAND (pvar, 0);
12781 gcc_assert (DECL_P (pvar));
12782 tree new_pvar = lookup_decl (pvar, ctx);
12783 x = build_fold_indirect_ref (new_pvar);
12784 TREE_THIS_NOTRAP (x) = 1;
12786 else
12787 x = build_receiver_ref (var, true, ctx);
12788 SET_DECL_VALUE_EXPR (new_var, x);
12789 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12791 break;
12793 case OMP_CLAUSE_PRIVATE:
12794 gcc_checking_assert (offloaded);
12795 if (is_gimple_omp_oacc (ctx->stmt))
12797 /* No 'private' clauses on OpenACC 'kernels'. */
12798 gcc_checking_assert (!is_oacc_kernels (ctx));
12799 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12800 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12802 break;
12804 var = OMP_CLAUSE_DECL (c);
12805 if (is_variable_sized (var))
12807 tree new_var = lookup_decl (var, ctx);
12808 tree pvar = DECL_VALUE_EXPR (var);
12809 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12810 pvar = TREE_OPERAND (pvar, 0);
12811 gcc_assert (DECL_P (pvar));
12812 tree new_pvar = lookup_decl (pvar, ctx);
12813 x = build_fold_indirect_ref (new_pvar);
12814 TREE_THIS_NOTRAP (x) = 1;
12815 SET_DECL_VALUE_EXPR (new_var, x);
12816 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12818 break;
12820 case OMP_CLAUSE_USE_DEVICE_PTR:
12821 case OMP_CLAUSE_USE_DEVICE_ADDR:
12822 case OMP_CLAUSE_IS_DEVICE_PTR:
12823 var = OMP_CLAUSE_DECL (c);
12824 map_cnt++;
12825 if (is_variable_sized (var))
12827 tree new_var = lookup_decl (var, ctx);
12828 tree pvar = DECL_VALUE_EXPR (var);
12829 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12830 pvar = TREE_OPERAND (pvar, 0);
12831 gcc_assert (DECL_P (pvar));
12832 tree new_pvar = lookup_decl (pvar, ctx);
12833 x = build_fold_indirect_ref (new_pvar);
12834 TREE_THIS_NOTRAP (x) = 1;
12835 SET_DECL_VALUE_EXPR (new_var, x);
12836 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12838 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12839 && !omp_privatize_by_reference (var)
12840 && !omp_is_allocatable_or_ptr (var)
12841 && !lang_hooks.decls.omp_array_data (var, true))
12842 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12844 tree new_var = lookup_decl (var, ctx);
12845 tree type = build_pointer_type (TREE_TYPE (var));
12846 x = create_tmp_var_raw (type, get_name (new_var));
12847 gimple_add_tmp_var (x);
12848 x = build_simple_mem_ref (x);
12849 SET_DECL_VALUE_EXPR (new_var, x);
12850 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12852 else
12854 tree new_var = lookup_decl (var, ctx);
12855 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12856 gimple_add_tmp_var (x);
12857 SET_DECL_VALUE_EXPR (new_var, x);
12858 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12860 break;
12863 if (offloaded)
12865 target_nesting_level++;
12866 lower_omp (&tgt_body, ctx);
12867 target_nesting_level--;
12869 else if (data_region)
12870 lower_omp (&tgt_body, ctx);
12872 if (offloaded)
12874 /* Declare all the variables created by mapping and the variables
12875 declared in the scope of the target body. */
12876 record_vars_into (ctx->block_vars, child_fn);
12877 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
12878 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
12881 olist = NULL;
12882 ilist = NULL;
12883 if (ctx->record_type)
12885 ctx->sender_decl
12886 = create_tmp_var (ctx->record_type, ".omp_data_arr");
12887 DECL_NAMELESS (ctx->sender_decl) = 1;
12888 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12889 t = make_tree_vec (3);
12890 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
12891 TREE_VEC_ELT (t, 1)
12892 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
12893 ".omp_data_sizes");
12894 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
12895 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
12896 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
12897 tree tkind_type = short_unsigned_type_node;
12898 int talign_shift = 8;
12899 TREE_VEC_ELT (t, 2)
12900 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
12901 ".omp_data_kinds");
12902 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
12903 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
12904 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
12905 gimple_omp_target_set_data_arg (stmt, t);
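/* Sketch of the runtime interface assumed here: the TREE_VEC built
   above roughly feeds a later call emitted by pass_expand_omp of the
   shape

     GOMP_target_ext (device, child_fn, map_cnt,
                      &.omp_data_arr, &.omp_data_sizes,
                      &.omp_data_kinds, ...);

   with one element per counted mapping in each of the three arrays.  */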
12907 vec<constructor_elt, va_gc> *vsize;
12908 vec<constructor_elt, va_gc> *vkind;
12909 vec_alloc (vsize, map_cnt);
12910 vec_alloc (vkind, map_cnt);
12911 unsigned int map_idx = 0;
12913 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12914 switch (OMP_CLAUSE_CODE (c))
12916 tree ovar, nc, s, purpose, var, x, type;
12917 unsigned int talign;
12919 default:
12920 break;
12922 case OMP_CLAUSE_MAP:
12923 case OMP_CLAUSE_TO:
12924 case OMP_CLAUSE_FROM:
12925 oacc_firstprivate_map:
12926 nc = c;
12927 ovar = OMP_CLAUSE_DECL (c);
12928 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12929 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12930 || (OMP_CLAUSE_MAP_KIND (c)
12931 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12932 break;
12933 if (!DECL_P (ovar))
12935 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12936 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
12938 nc = OMP_CLAUSE_CHAIN (c);
12939 gcc_checking_assert (OMP_CLAUSE_DECL (nc)
12940 == get_base_address (ovar));
12941 ovar = OMP_CLAUSE_DECL (nc);
12943 else
12945 tree x = build_sender_ref (ovar, ctx);
12946 tree v = ovar;
12947 if (in_reduction_clauses
12948 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12949 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
12951 v = unshare_expr (v);
12952 tree *p = &v;
12953 while (handled_component_p (*p)
12954 || TREE_CODE (*p) == INDIRECT_REF
12955 || TREE_CODE (*p) == ADDR_EXPR
12956 || TREE_CODE (*p) == MEM_REF
12957 || TREE_CODE (*p) == NON_LVALUE_EXPR)
12958 p = &TREE_OPERAND (*p, 0);
12959 tree d = *p;
12960 if (is_variable_sized (d))
12962 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
12963 d = DECL_VALUE_EXPR (d);
12964 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
12965 d = TREE_OPERAND (d, 0);
12966 gcc_assert (DECL_P (d));
12968 splay_tree_key key
12969 = (splay_tree_key) &DECL_CONTEXT (d);
12970 tree nd = (tree) splay_tree_lookup (ctx->field_map,
12971 key)->value;
12972 if (d == *p)
12973 *p = nd;
12974 else
12975 *p = build_fold_indirect_ref (nd);
12977 v = build_fold_addr_expr_with_type (v, ptr_type_node);
12978 gimplify_assign (x, v, &ilist);
12979 nc = NULL_TREE;
12982 else
12984 if (DECL_SIZE (ovar)
12985 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
12987 tree ovar2 = DECL_VALUE_EXPR (ovar);
12988 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
12989 ovar2 = TREE_OPERAND (ovar2, 0);
12990 gcc_assert (DECL_P (ovar2));
12991 ovar = ovar2;
12993 if (!maybe_lookup_field (ovar, ctx)
12994 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12995 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12996 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
12997 continue;
13000 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
13001 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
13002 talign = DECL_ALIGN_UNIT (ovar);
13004 var = NULL_TREE;
13005 if (nc)
13007 if (in_reduction_clauses
13008 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13009 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13011 tree d = ovar;
13012 if (is_variable_sized (d))
13014 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13015 d = DECL_VALUE_EXPR (d);
13016 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13017 d = TREE_OPERAND (d, 0);
13018 gcc_assert (DECL_P (d));
13020 splay_tree_key key
13021 = (splay_tree_key) &DECL_CONTEXT (d);
13022 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13023 key)->value;
13024 if (d == ovar)
13025 var = nd;
13026 else
13027 var = build_fold_indirect_ref (nd);
13029 else
13030 var = lookup_decl_in_outer_ctx (ovar, ctx);
13032 if (nc
13033 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13034 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13035 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
13036 && is_omp_target (stmt))
13038 x = build_sender_ref (c, ctx);
13039 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
13041 else if (nc)
13043 x = build_sender_ref (ovar, ctx);
13045 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13046 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
13047 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
13048 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
13050 gcc_assert (offloaded);
13051 tree avar
13052 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
13053 mark_addressable (avar);
13054 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
13055 talign = DECL_ALIGN_UNIT (avar);
13056 avar = build_fold_addr_expr (avar);
13057 gimplify_assign (x, avar, &ilist);
13059 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13061 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
13062 if (!omp_privatize_by_reference (var))
13064 if (is_gimple_reg (var)
13065 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13066 suppress_warning (var);
13067 var = build_fold_addr_expr (var);
13069 else
13070 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13071 gimplify_assign (x, var, &ilist);
13073 else if (is_gimple_reg (var))
13075 gcc_assert (offloaded);
13076 tree avar = create_tmp_var (TREE_TYPE (var));
13077 mark_addressable (avar);
13078 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
13079 if (GOMP_MAP_COPY_TO_P (map_kind)
13080 || map_kind == GOMP_MAP_POINTER
13081 || map_kind == GOMP_MAP_TO_PSET
13082 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13084 /* If we need to initialize a temporary
13085 with VAR because it is not addressable, and
13086 the variable hasn't been initialized yet, then
13087 we'll get a warning for the store to avar.
13088 Don't warn in that case; the mapping might
13089 be implicit. */
13090 suppress_warning (var, OPT_Wuninitialized);
13091 gimplify_assign (avar, var, &ilist);
13093 avar = build_fold_addr_expr (avar);
13094 gimplify_assign (x, avar, &ilist);
13095 if ((GOMP_MAP_COPY_FROM_P (map_kind)
13096 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13097 && !TYPE_READONLY (TREE_TYPE (var)))
13099 x = unshare_expr (x);
13100 x = build_simple_mem_ref (x);
13101 gimplify_assign (var, x, &olist);
13104 else
13106 /* While MAP is handled explicitly by the FE,
13107 for 'target update', only the identifier is passed. */
13108 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
13109 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
13110 && (omp_is_allocatable_or_ptr (var)
13111 && omp_check_optional_argument (var, false)))
13112 var = build_fold_indirect_ref (var);
13113 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
13114 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
13115 || (!omp_is_allocatable_or_ptr (var)
13116 && !omp_check_optional_argument (var, false)))
13117 var = build_fold_addr_expr (var);
13118 gimplify_assign (x, var, &ilist);
13121 s = NULL_TREE;
13122 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13124 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13125 s = TREE_TYPE (ovar);
13126 if (TREE_CODE (s) == REFERENCE_TYPE
13127 || omp_check_optional_argument (ovar, false))
13128 s = TREE_TYPE (s);
13129 s = TYPE_SIZE_UNIT (s);
13131 else
13132 s = OMP_CLAUSE_SIZE (c);
13133 if (s == NULL_TREE)
13134 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13135 s = fold_convert (size_type_node, s);
13136 purpose = size_int (map_idx++);
13137 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13138 if (TREE_CODE (s) != INTEGER_CST)
13139 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13141 unsigned HOST_WIDE_INT tkind, tkind_zero;
13142 switch (OMP_CLAUSE_CODE (c))
13144 case OMP_CLAUSE_MAP:
13145 tkind = OMP_CLAUSE_MAP_KIND (c);
13146 tkind_zero = tkind;
13147 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
13148 switch (tkind)
13150 case GOMP_MAP_ALLOC:
13151 case GOMP_MAP_IF_PRESENT:
13152 case GOMP_MAP_TO:
13153 case GOMP_MAP_FROM:
13154 case GOMP_MAP_TOFROM:
13155 case GOMP_MAP_ALWAYS_TO:
13156 case GOMP_MAP_ALWAYS_FROM:
13157 case GOMP_MAP_ALWAYS_TOFROM:
13158 case GOMP_MAP_RELEASE:
13159 case GOMP_MAP_FORCE_TO:
13160 case GOMP_MAP_FORCE_FROM:
13161 case GOMP_MAP_FORCE_TOFROM:
13162 case GOMP_MAP_FORCE_PRESENT:
13163 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
13164 break;
13165 case GOMP_MAP_DELETE:
13166 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
13167 default:
13168 break;
13170 if (tkind_zero != tkind)
13172 if (integer_zerop (s))
13173 tkind = tkind_zero;
13174 else if (integer_nonzerop (s))
13175 tkind_zero = tkind;
13177 if (tkind_zero == tkind
13178 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c)
13179 && (((tkind & GOMP_MAP_FLAG_SPECIAL_BITS)
13180 & ~GOMP_MAP_IMPLICIT)
13181 == 0))
13183 /* If this is an implicit map whose GOMP_MAP_IMPLICIT
13184 bits do not conflict with other special bit encodings,
13185 turn the GOMP_MAP_IMPLICIT flag on for the runtime
13186 to see. */
13187 tkind |= GOMP_MAP_IMPLICIT;
13188 tkind_zero = tkind;
13190 break;
13191 case OMP_CLAUSE_FIRSTPRIVATE:
13192 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13193 tkind = GOMP_MAP_TO;
13194 tkind_zero = tkind;
13195 break;
13196 case OMP_CLAUSE_TO:
13197 tkind = GOMP_MAP_TO;
13198 tkind_zero = tkind;
13199 break;
13200 case OMP_CLAUSE_FROM:
13201 tkind = GOMP_MAP_FROM;
13202 tkind_zero = tkind;
13203 break;
13204 default:
13205 gcc_unreachable ();
13207 gcc_checking_assert (tkind
13208 < (HOST_WIDE_INT_C (1U) << talign_shift));
13209 gcc_checking_assert (tkind_zero
13210 < (HOST_WIDE_INT_C (1U) << talign_shift));
13211 talign = ceil_log2 (talign);
13212 tkind |= talign << talign_shift;
13213 tkind_zero |= talign << talign_shift;
13214 gcc_checking_assert (tkind
13215 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13216 gcc_checking_assert (tkind_zero
13217 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13218 if (tkind == tkind_zero)
13219 x = build_int_cstu (tkind_type, tkind);
13220 else
13222 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
13223 x = build3 (COND_EXPR, tkind_type,
13224 fold_build2 (EQ_EXPR, boolean_type_node,
13225 unshare_expr (s), size_zero_node),
13226 build_int_cstu (tkind_type, tkind_zero),
13227 build_int_cstu (tkind_type, tkind));
13229 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
13230 if (nc && nc != c)
13231 c = nc;
13232 break;
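/* Exposition note: each map entry appends exactly one size and one
   kind.  When a section's zero-lengthness is only known at run time
   (TKIND != TKIND_ZERO), the kind is a COND_EXPR selecting between the
   two encodings based on whether the size is zero, which also forces
   .omp_data_kinds out of static storage.  */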
13234 case OMP_CLAUSE_FIRSTPRIVATE:
13235 if (is_gimple_omp_oacc (ctx->stmt))
13236 goto oacc_firstprivate_map;
13237 ovar = OMP_CLAUSE_DECL (c);
13238 if (omp_privatize_by_reference (ovar))
13239 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13240 else
13241 talign = DECL_ALIGN_UNIT (ovar);
13242 var = lookup_decl_in_outer_ctx (ovar, ctx);
13243 x = build_sender_ref (ovar, ctx);
13244 tkind = GOMP_MAP_FIRSTPRIVATE;
13245 type = TREE_TYPE (ovar);
13246 if (omp_privatize_by_reference (ovar))
13247 type = TREE_TYPE (type);
13248 if ((INTEGRAL_TYPE_P (type)
13249 && TYPE_PRECISION (type) <= POINTER_SIZE)
13250 || TREE_CODE (type) == POINTER_TYPE)
13252 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13253 tree t = var;
13254 if (omp_privatize_by_reference (var))
13255 t = build_simple_mem_ref (var);
13256 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13257 suppress_warning (var);
13258 if (TREE_CODE (type) != POINTER_TYPE)
13259 t = fold_convert (pointer_sized_int_node, t);
13260 t = fold_convert (TREE_TYPE (x), t);
13261 gimplify_assign (x, t, &ilist);
13263 else if (omp_privatize_by_reference (var))
13264 gimplify_assign (x, var, &ilist);
13265 else if (is_gimple_reg (var))
13267 tree avar = create_tmp_var (TREE_TYPE (var));
13268 mark_addressable (avar);
13269 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13270 suppress_warning (var);
13271 gimplify_assign (avar, var, &ilist);
13272 avar = build_fold_addr_expr (avar);
13273 gimplify_assign (x, avar, &ilist);
13275 else
13277 var = build_fold_addr_expr (var);
13278 gimplify_assign (x, var, &ilist);
13280 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13281 s = size_int (0);
13282 else if (omp_privatize_by_reference (ovar))
13283 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13284 else
13285 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13286 s = fold_convert (size_type_node, s);
13287 purpose = size_int (map_idx++);
13288 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13289 if (TREE_CODE (s) != INTEGER_CST)
13290 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13292 gcc_checking_assert (tkind
13293 < (HOST_WIDE_INT_C (1U) << talign_shift));
13294 talign = ceil_log2 (talign);
13295 tkind |= talign << talign_shift;
13296 gcc_checking_assert (tkind
13297 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13298 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13299 build_int_cstu (tkind_type, tkind));
13300 break;
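/* A small worked example, for exposition: given

     int n = 32;
     #pragma omp target firstprivate(n)

   N fits in a pointer, so TKIND is GOMP_MAP_FIRSTPRIVATE_INT, the value
   itself travels in the pointer slot (converted through
   pointer_sized_int_node) and the recorded size is 0; no copy of the
   object is mapped at all.  */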
13302 case OMP_CLAUSE_USE_DEVICE_PTR:
13303 case OMP_CLAUSE_USE_DEVICE_ADDR:
13304 case OMP_CLAUSE_IS_DEVICE_PTR:
13305 ovar = OMP_CLAUSE_DECL (c);
13306 var = lookup_decl_in_outer_ctx (ovar, ctx);
13308 if (lang_hooks.decls.omp_array_data (ovar, true))
13310 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13311 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13312 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13314 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13316 tkind = GOMP_MAP_USE_DEVICE_PTR;
13317 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13319 else
13321 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13322 x = build_sender_ref (ovar, ctx);
13325 if (is_gimple_omp_oacc (ctx->stmt))
13327 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13329 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13330 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13333 type = TREE_TYPE (ovar);
13334 if (lang_hooks.decls.omp_array_data (ovar, true))
13335 var = lang_hooks.decls.omp_array_data (ovar, false);
13336 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13337 && !omp_privatize_by_reference (ovar)
13338 && !omp_is_allocatable_or_ptr (ovar))
13339 || TREE_CODE (type) == ARRAY_TYPE)
13340 var = build_fold_addr_expr (var);
13341 else
13343 if (omp_privatize_by_reference (ovar)
13344 || omp_check_optional_argument (ovar, false)
13345 || omp_is_allocatable_or_ptr (ovar))
13347 type = TREE_TYPE (type);
13348 if (POINTER_TYPE_P (type)
13349 && TREE_CODE (type) != ARRAY_TYPE
13350 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13351 && !omp_is_allocatable_or_ptr (ovar))
13352 || (omp_privatize_by_reference (ovar)
13353 && omp_is_allocatable_or_ptr (ovar))))
13354 var = build_simple_mem_ref (var);
13355 var = fold_convert (TREE_TYPE (x), var);
13358 tree present;
13359 present = omp_check_optional_argument (ovar, true);
13360 if (present)
13362 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13363 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13364 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13365 tree new_x = unshare_expr (x);
13366 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13367 fb_rvalue);
13368 gcond *cond = gimple_build_cond_from_tree (present,
13369 notnull_label,
13370 null_label);
13371 gimple_seq_add_stmt (&ilist, cond);
13372 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
13373 gimplify_assign (new_x, null_pointer_node, &ilist);
13374 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
13375 gimple_seq_add_stmt (&ilist,
13376 gimple_build_label (notnull_label));
13377 gimplify_assign (x, var, &ilist);
13378 gimple_seq_add_stmt (&ilist,
13379 gimple_build_label (opt_arg_label));
13381 else
13382 gimplify_assign (x, var, &ilist);
13383 s = size_int (0);
13384 purpose = size_int (map_idx++);
13385 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13386 gcc_checking_assert (tkind
13387 < (HOST_WIDE_INT_C (1U) << talign_shift));
13388 gcc_checking_assert (tkind
13389 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13390 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13391 build_int_cstu (tkind_type, tkind));
13392 break;
13395 gcc_assert (map_idx == map_cnt);
13397 DECL_INITIAL (TREE_VEC_ELT (t, 1))
13398 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13399 DECL_INITIAL (TREE_VEC_ELT (t, 2))
13400 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
13401 for (int i = 1; i <= 2; i++)
13402 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
13404 gimple_seq initlist = NULL;
13405 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
13406 TREE_VEC_ELT (t, i)),
13407 &initlist, true, NULL_TREE);
13408 gimple_seq_add_seq (&ilist, initlist);
13410 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
13411 gimple_seq_add_stmt (&olist,
13412 gimple_build_assign (TREE_VEC_ELT (t, i),
13413 clobber));
13415 else if (omp_maybe_offloaded_ctx (ctx->outer))
13417 tree id = get_identifier ("omp declare target");
13418 tree decl = TREE_VEC_ELT (t, i);
13419 DECL_ATTRIBUTES (decl)
13420 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13421 varpool_node *node = varpool_node::get (decl);
13422 if (node)
13424 node->offloadable = 1;
13425 if (ENABLE_OFFLOADING)
13427 g->have_offload = true;
13428 vec_safe_push (offload_vars, t);
13433 tree clobber = build_clobber (ctx->record_type);
13434 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13435 clobber));
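/* Design note: the clobbers emitted above mark .omp_data_arr and any
   non-static size/kind arrays as dead once the region has finished, so
   later passes may reuse their stack slots rather than keeping them
   live for the remainder of the function.  */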
13438 /* Once all the expansions are done, sequence all the different
13439 fragments inside gimple_omp_body. */
13441 new_body = NULL;
13443 if (offloaded
13444 && ctx->record_type)
13446 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13447 /* fixup_child_record_type might have changed receiver_decl's type. */
13448 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13449 gimple_seq_add_stmt (&new_body,
13450 gimple_build_assign (ctx->receiver_decl, t));
13452 gimple_seq_add_seq (&new_body, fplist);
13454 if (offloaded || data_region)
13456 tree prev = NULL_TREE;
13457 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13458 switch (OMP_CLAUSE_CODE (c))
13460 tree var, x;
13461 default:
13462 break;
13463 case OMP_CLAUSE_FIRSTPRIVATE:
13464 if (is_gimple_omp_oacc (ctx->stmt))
13465 break;
13466 var = OMP_CLAUSE_DECL (c);
13467 if (omp_privatize_by_reference (var)
13468 || is_gimple_reg_type (TREE_TYPE (var)))
13470 tree new_var = lookup_decl (var, ctx);
13471 tree type;
13472 type = TREE_TYPE (var);
13473 if (omp_privatize_by_reference (var))
13474 type = TREE_TYPE (type);
13475 if ((INTEGRAL_TYPE_P (type)
13476 && TYPE_PRECISION (type) <= POINTER_SIZE)
13477 || TREE_CODE (type) == POINTER_TYPE)
13479 x = build_receiver_ref (var, false, ctx);
13480 if (TREE_CODE (type) != POINTER_TYPE)
13481 x = fold_convert (pointer_sized_int_node, x);
13482 x = fold_convert (type, x);
13483 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13484 fb_rvalue);
13485 if (omp_privatize_by_reference (var))
13487 tree v = create_tmp_var_raw (type, get_name (var));
13488 gimple_add_tmp_var (v);
13489 TREE_ADDRESSABLE (v) = 1;
13490 gimple_seq_add_stmt (&new_body,
13491 gimple_build_assign (v, x));
13492 x = build_fold_addr_expr (v);
13494 gimple_seq_add_stmt (&new_body,
13495 gimple_build_assign (new_var, x));
13497 else
13499 bool by_ref = !omp_privatize_by_reference (var);
13500 x = build_receiver_ref (var, by_ref, ctx);
13501 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13502 fb_rvalue);
13503 gimple_seq_add_stmt (&new_body,
13504 gimple_build_assign (new_var, x));
13507 else if (is_variable_sized (var))
13509 tree pvar = DECL_VALUE_EXPR (var);
13510 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13511 pvar = TREE_OPERAND (pvar, 0);
13512 gcc_assert (DECL_P (pvar));
13513 tree new_var = lookup_decl (pvar, ctx);
13514 x = build_receiver_ref (var, false, ctx);
13515 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13516 gimple_seq_add_stmt (&new_body,
13517 gimple_build_assign (new_var, x));
13519 break;
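/* Exposition note: this is the receiver-side counterpart of the
   GOMP_MAP_FIRSTPRIVATE_INT sender logic above; small by-value
   firstprivates are read back out of the pointer slot (converting
   through pointer_sized_int_node again) instead of dereferencing a
   mapped copy.  */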
13520 case OMP_CLAUSE_PRIVATE:
13521 if (is_gimple_omp_oacc (ctx->stmt))
13522 break;
13523 var = OMP_CLAUSE_DECL (c);
13524 if (omp_privatize_by_reference (var))
13526 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13527 tree new_var = lookup_decl (var, ctx);
13528 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13529 if (TREE_CONSTANT (x))
13531 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13532 get_name (var));
13533 gimple_add_tmp_var (x);
13534 TREE_ADDRESSABLE (x) = 1;
13535 x = build_fold_addr_expr_loc (clause_loc, x);
13537 else
13538 break;
13540 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13541 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13542 gimple_seq_add_stmt (&new_body,
13543 gimple_build_assign (new_var, x));
13545 break;
13546 case OMP_CLAUSE_USE_DEVICE_PTR:
13547 case OMP_CLAUSE_USE_DEVICE_ADDR:
13548 case OMP_CLAUSE_IS_DEVICE_PTR:
13549 tree new_var;
13550 gimple_seq assign_body;
13551 bool is_array_data;
13552 bool do_optional_check;
13553 assign_body = NULL;
13554 do_optional_check = false;
13555 var = OMP_CLAUSE_DECL (c);
13556 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13558 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13559 x = build_sender_ref (is_array_data
13560 ? (splay_tree_key) &DECL_NAME (var)
13561 : (splay_tree_key) &DECL_UID (var), ctx);
13562 else
13563 x = build_receiver_ref (var, false, ctx);
13565 if (is_array_data)
13567 bool is_ref = omp_privatize_by_reference (var);
13568 do_optional_check = true;
13569 /* First, we copy the descriptor data from the host; then
13570 we update its data to point to the target address. */
13571 new_var = lookup_decl (var, ctx);
13572 new_var = DECL_VALUE_EXPR (new_var);
13573 tree v = new_var;
13575 if (is_ref)
13577 var = build_fold_indirect_ref (var);
13578 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
13579 fb_rvalue);
13580 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
13581 gimple_add_tmp_var (v);
13582 TREE_ADDRESSABLE (v) = 1;
13583 gimple_seq_add_stmt (&assign_body,
13584 gimple_build_assign (v, var));
13585 tree rhs = build_fold_addr_expr (v);
13586 gimple_seq_add_stmt (&assign_body,
13587 gimple_build_assign (new_var, rhs));
13589 else
13590 gimple_seq_add_stmt (&assign_body,
13591 gimple_build_assign (new_var, var));
13593 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13594 gcc_assert (v2);
13595 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13596 gimple_seq_add_stmt (&assign_body,
13597 gimple_build_assign (v2, x));
13599 else if (is_variable_sized (var))
13601 tree pvar = DECL_VALUE_EXPR (var);
13602 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13603 pvar = TREE_OPERAND (pvar, 0);
13604 gcc_assert (DECL_P (pvar));
13605 new_var = lookup_decl (pvar, ctx);
13606 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13607 gimple_seq_add_stmt (&assign_body,
13608 gimple_build_assign (new_var, x));
13610 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13611 && !omp_privatize_by_reference (var)
13612 && !omp_is_allocatable_or_ptr (var))
13613 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13615 new_var = lookup_decl (var, ctx);
13616 new_var = DECL_VALUE_EXPR (new_var);
13617 gcc_assert (TREE_CODE (new_var) == MEM_REF);
13618 new_var = TREE_OPERAND (new_var, 0);
13619 gcc_assert (DECL_P (new_var));
13620 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13621 gimple_seq_add_stmt (&assign_body,
13622 gimple_build_assign (new_var, x));
13624 else
13626 tree type = TREE_TYPE (var);
13627 new_var = lookup_decl (var, ctx);
13628 if (omp_privatize_by_reference (var))
13630 type = TREE_TYPE (type);
13631 if (POINTER_TYPE_P (type)
13632 && TREE_CODE (type) != ARRAY_TYPE
13633 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13634 || (omp_privatize_by_reference (var)
13635 && omp_is_allocatable_or_ptr (var))))
13637 tree v = create_tmp_var_raw (type, get_name (var));
13638 gimple_add_tmp_var (v);
13639 TREE_ADDRESSABLE (v) = 1;
13640 x = fold_convert (type, x);
13641 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13642 fb_rvalue);
13643 gimple_seq_add_stmt (&assign_body,
13644 gimple_build_assign (v, x));
13645 x = build_fold_addr_expr (v);
13646 do_optional_check = true;
13649 new_var = DECL_VALUE_EXPR (new_var);
13650 x = fold_convert (TREE_TYPE (new_var), x);
13651 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13652 gimple_seq_add_stmt (&assign_body,
13653 gimple_build_assign (new_var, x));
13655 tree present;
13656 present = (do_optional_check
13657 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
13658 : NULL_TREE);
13659 if (present)
13661 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13662 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13663 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13664 glabel *null_glabel = gimple_build_label (null_label);
13665 glabel *notnull_glabel = gimple_build_label (notnull_label);
13666 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
13667 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13668 fb_rvalue);
13669 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13670 fb_rvalue);
13671 gcond *cond = gimple_build_cond_from_tree (present,
13672 notnull_label,
13673 null_label);
13674 gimple_seq_add_stmt (&new_body, cond);
13675 gimple_seq_add_stmt (&new_body, null_glabel);
13676 gimplify_assign (new_var, null_pointer_node, &new_body);
13677 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13678 gimple_seq_add_stmt (&new_body, notnull_glabel);
13679 gimple_seq_add_seq (&new_body, assign_body);
13680 gimple_seq_add_stmt (&new_body,
13681 gimple_build_label (opt_arg_label));
13683 else
13684 gimple_seq_add_seq (&new_body, assign_body);
13685 break;
13687 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
13688 so that any firstprivate vars holding an OMP_CLAUSE_SIZE, if needed,
13689 have already been handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13690 or references to VLAs. */
13691 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13692 switch (OMP_CLAUSE_CODE (c))
13694 tree var;
13695 default:
13696 break;
13697 case OMP_CLAUSE_MAP:
13698 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13699 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13701 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13702 poly_int64 offset = 0;
13703 gcc_assert (prev);
13704 var = OMP_CLAUSE_DECL (c);
13705 if (DECL_P (var)
13706 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
13707 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
13708 ctx))
13709 && varpool_node::get_create (var)->offloadable)
13710 break;
13711 if (TREE_CODE (var) == INDIRECT_REF
13712 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
13713 var = TREE_OPERAND (var, 0);
13714 if (TREE_CODE (var) == COMPONENT_REF)
13716 var = get_addr_base_and_unit_offset (var, &offset);
13717 gcc_assert (var != NULL_TREE && DECL_P (var));
13719 else if (DECL_SIZE (var)
13720 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
13722 tree var2 = DECL_VALUE_EXPR (var);
13723 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
13724 var2 = TREE_OPERAND (var2, 0);
13725 gcc_assert (DECL_P (var2));
13726 var = var2;
13728 tree new_var = lookup_decl (var, ctx), x;
13729 tree type = TREE_TYPE (new_var);
13730 bool is_ref;
13731 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
13732 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13733 == COMPONENT_REF))
13735 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
13736 is_ref = true;
13737 new_var = build2 (MEM_REF, type,
13738 build_fold_addr_expr (new_var),
13739 build_int_cst (build_pointer_type (type),
13740 offset));
13742 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
13744 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
13745 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
13746 new_var = build2 (MEM_REF, type,
13747 build_fold_addr_expr (new_var),
13748 build_int_cst (build_pointer_type (type),
13749 offset));
13751 else
13752 is_ref = omp_privatize_by_reference (var);
13753 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13754 is_ref = false;
13755 bool ref_to_array = false;
13756 if (is_ref)
13758 type = TREE_TYPE (type);
13759 if (TREE_CODE (type) == ARRAY_TYPE)
13761 type = build_pointer_type (type);
13762 ref_to_array = true;
13765 else if (TREE_CODE (type) == ARRAY_TYPE)
13767 tree decl2 = DECL_VALUE_EXPR (new_var);
13768 gcc_assert (TREE_CODE (decl2) == MEM_REF);
13769 decl2 = TREE_OPERAND (decl2, 0);
13770 gcc_assert (DECL_P (decl2));
13771 new_var = decl2;
13772 type = TREE_TYPE (new_var);
13774 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
13775 x = fold_convert_loc (clause_loc, type, x);
13776 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
13778 tree bias = OMP_CLAUSE_SIZE (c);
13779 if (DECL_P (bias))
13780 bias = lookup_decl (bias, ctx);
13781 bias = fold_convert_loc (clause_loc, sizetype, bias);
13782 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
13783 bias);
13784 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
13785 TREE_TYPE (x), x, bias);
13787 if (ref_to_array)
13788 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13789 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13790 if (is_ref && !ref_to_array)
13792 tree t = create_tmp_var_raw (type, get_name (var));
13793 gimple_add_tmp_var (t);
13794 TREE_ADDRESSABLE (t) = 1;
13795 gimple_seq_add_stmt (&new_body,
13796 gimple_build_assign (t, x));
13797 x = build_fold_addr_expr_loc (clause_loc, t);
13799 gimple_seq_add_stmt (&new_body,
13800 gimple_build_assign (new_var, x));
13801 prev = NULL_TREE;
13803 else if (OMP_CLAUSE_CHAIN (c)
13804 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
13805 == OMP_CLAUSE_MAP
13806 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13807 == GOMP_MAP_FIRSTPRIVATE_POINTER
13808 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13809 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13810 prev = c;
13811 break;
13812 case OMP_CLAUSE_PRIVATE:
13813 var = OMP_CLAUSE_DECL (c);
13814 if (is_variable_sized (var))
13816 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13817 tree new_var = lookup_decl (var, ctx);
13818 tree pvar = DECL_VALUE_EXPR (var);
13819 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13820 pvar = TREE_OPERAND (pvar, 0);
13821 gcc_assert (DECL_P (pvar));
13822 tree new_pvar = lookup_decl (pvar, ctx);
13823 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13824 tree al = size_int (DECL_ALIGN (var));
13825 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
13826 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13827 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
13828 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13829 gimple_seq_add_stmt (&new_body,
13830 gimple_build_assign (new_pvar, x));
13832 else if (omp_privatize_by_reference (var)
13833 && !is_gimple_omp_oacc (ctx->stmt))
13835 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13836 tree new_var = lookup_decl (var, ctx);
13837 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13838 if (TREE_CONSTANT (x))
13839 break;
13840 else
13842 tree atmp
13843 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13844 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
13845 tree al = size_int (TYPE_ALIGN (rtype));
13846 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13849 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13850 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13851 gimple_seq_add_stmt (&new_body,
13852 gimple_build_assign (new_var, x));
13854 break;
13857 gimple_seq fork_seq = NULL;
13858 gimple_seq join_seq = NULL;
13860 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
13862 /* If there are reductions on the offloaded region itself, treat
13863 them as a dummy GANG loop. */
13864 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
13866 gcall *private_marker = lower_oacc_private_marker (ctx);
13868 if (private_marker)
13869 gimple_call_set_arg (private_marker, 2, level);
13871 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
13872 false, NULL, private_marker, NULL, &fork_seq,
13873 &join_seq, ctx);
13876 gimple_seq_add_seq (&new_body, fork_seq);
13877 gimple_seq_add_seq (&new_body, tgt_body);
13878 gimple_seq_add_seq (&new_body, join_seq);
13880 if (offloaded)
13882 new_body = maybe_catch_exception (new_body);
13883 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
13885 gimple_omp_set_body (stmt, new_body);
13888 bind = gimple_build_bind (NULL, NULL,
13889 tgt_bind ? gimple_bind_block (tgt_bind)
13890 : NULL_TREE);
13891 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
13892 gimple_bind_add_seq (bind, ilist);
13893 gimple_bind_add_stmt (bind, stmt);
13894 gimple_bind_add_seq (bind, olist);
13896 pop_gimplify_context (NULL);
13898 if (dep_bind)
13900 gimple_bind_add_seq (dep_bind, dep_ilist);
13901 gimple_bind_add_stmt (dep_bind, bind);
13902 gimple_bind_add_seq (dep_bind, dep_olist);
13903 pop_gimplify_context (dep_bind);
13907 /* Expand code for an OpenMP teams directive. */
13909 static void
13910 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
13912 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
13913 push_gimplify_context ();
13915 tree block = make_node (BLOCK);
13916 gbind *bind = gimple_build_bind (NULL, NULL, block);
13917 gsi_replace (gsi_p, bind, true);
13918 gimple_seq bind_body = NULL;
13919 gimple_seq dlist = NULL;
13920 gimple_seq olist = NULL;
13922 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13923 OMP_CLAUSE_NUM_TEAMS);
13924 tree num_teams_lower = NULL_TREE;
13925 if (num_teams == NULL_TREE)
13926 num_teams = build_int_cst (unsigned_type_node, 0);
13927 else
13929 num_teams_lower = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams);
13930 if (num_teams_lower)
13932 num_teams_lower = fold_convert (unsigned_type_node, num_teams_lower);
13933 gimplify_expr (&num_teams_lower, &bind_body, NULL, is_gimple_val,
13934 fb_rvalue);
13936 num_teams = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams);
13937 num_teams = fold_convert (unsigned_type_node, num_teams);
13938 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
13940 if (num_teams_lower == NULL_TREE)
13941 num_teams_lower = num_teams;
13942 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13943 OMP_CLAUSE_THREAD_LIMIT);
13944 if (thread_limit == NULL_TREE)
13945 thread_limit = build_int_cst (unsigned_type_node, 0);
13946 else
13948 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
13949 thread_limit = fold_convert (unsigned_type_node, thread_limit);
13950 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
13951 fb_rvalue);
13953 location_t loc = gimple_location (teams_stmt);
13954 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4);
13955 tree rettype = TREE_TYPE (TREE_TYPE (decl));
13956 tree first = create_tmp_var (rettype);
13957 gimple_seq_add_stmt (&bind_body,
13958 gimple_build_assign (first, build_one_cst (rettype)));
13959 tree llabel = create_artificial_label (loc);
13960 gimple_seq_add_stmt (&bind_body, gimple_build_label (llabel));
13961 gimple *call
13962 = gimple_build_call (decl, 4, num_teams_lower, num_teams, thread_limit,
13963 first);
13964 gimple_set_location (call, loc);
13965 tree temp = create_tmp_var (rettype);
13966 gimple_call_set_lhs (call, temp);
13967 gimple_seq_add_stmt (&bind_body, call);
13969 tree tlabel = create_artificial_label (loc);
13970 tree flabel = create_artificial_label (loc);
13971 gimple *cond = gimple_build_cond (NE_EXPR, temp, build_zero_cst (rettype),
13972 tlabel, flabel);
13973 gimple_seq_add_stmt (&bind_body, cond);
13974 gimple_seq_add_stmt (&bind_body, gimple_build_label (tlabel));
13975 gimple_seq_add_stmt (&bind_body,
13976 gimple_build_assign (first, build_zero_cst (rettype)));
13978 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
13979 &bind_body, &dlist, ctx, NULL);
13980 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
13981 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
13982 NULL, ctx);
13983 gimple_seq_add_stmt (&bind_body, teams_stmt);
13985 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
13986 gimple_omp_set_body (teams_stmt, NULL);
13987 gimple_seq_add_seq (&bind_body, olist);
13988 gimple_seq_add_seq (&bind_body, dlist);
13989 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
13990 gimple_seq_add_stmt (&bind_body, gimple_build_goto (llabel));
13991 gimple_seq_add_stmt (&bind_body, gimple_build_label (flabel));
13992 gimple_bind_set_body (bind, bind_body);
13994 pop_gimplify_context (bind);
13996 gimple_bind_append_vars (bind, ctx->block_vars);
13997 BLOCK_VARS (block) = ctx->block_vars;
13998 if (BLOCK_VARS (block))
13999 TREE_USED (block) = 1;
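/* A sketch of the host control flow generated above, for exposition:

     first = 1;
   llabel:
     temp = GOMP_teams4 (num_teams_lower, num_teams, thread_limit, first);
     if (temp != 0) goto tlabel; else goto flabel;
   tlabel:
     first = 0;
     ... teams body, reductions, destructors ...
     goto llabel;
   flabel:

   i.e. GOMP_teams4 returns nonzero for as long as another team
   iteration should execute.  */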
14002 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
14003 regimplified. If DATA is non-NULL, lower_omp_1 is outside
14004 of an OMP context, but with task_shared_vars set. */
14006 static tree
14007 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
14008 void *data)
14010 tree t = *tp;
14012 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
14013 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
14014 && data == NULL
14015 && DECL_HAS_VALUE_EXPR_P (t))
14016 return t;
14018 if (task_shared_vars
14019 && DECL_P (t)
14020 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
14021 return t;
14023 /* If a global variable has been privatized, TREE_CONSTANT on
14024 ADDR_EXPR might be wrong. */
14025 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
14026 recompute_tree_invariant_for_addr_expr (t);
14028 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
14029 return NULL_TREE;
14032 /* Data to be communicated between lower_omp_regimplify_operands and
14033 lower_omp_regimplify_operands_p. */
14035 struct lower_omp_regimplify_operands_data
14037 omp_context *ctx;
14038 vec<tree> *decls;
14041 /* Helper function for lower_omp_regimplify_operands. Find
14042 omp_member_access_dummy_var vars and adjust temporarily their
14043 DECL_VALUE_EXPRs if needed. */
14045 static tree
14046 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
14047 void *data)
14049 tree t = omp_member_access_dummy_var (*tp);
14050 if (t)
14052 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
14053 lower_omp_regimplify_operands_data *ldata
14054 = (lower_omp_regimplify_operands_data *) wi->info;
14055 tree o = maybe_lookup_decl (t, ldata->ctx);
14056 if (o != t)
14058 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
14059 ldata->decls->safe_push (*tp);
14060 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
14061 SET_DECL_VALUE_EXPR (*tp, v);
14064 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
14065 return NULL_TREE;
14068 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
14069 of omp_member_access_dummy_var vars during regimplification. */
14071 static void
14072 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
14073 gimple_stmt_iterator *gsi_p)
14075 auto_vec<tree, 10> decls;
14076 if (ctx)
14078 struct walk_stmt_info wi;
14079 memset (&wi, '\0', sizeof (wi));
14080 struct lower_omp_regimplify_operands_data data;
14081 data.ctx = ctx;
14082 data.decls = &decls;
14083 wi.info = &data;
14084 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
14086 gimple_regimplify_operands (stmt, gsi_p);
14087 while (!decls.is_empty ())
14089 tree t = decls.pop ();
14090 tree v = decls.pop ();
14091 SET_DECL_VALUE_EXPR (t, v);
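/* Exposition note: the save/restore above works in pairs -- the walk
   pushes the saved DECL_VALUE_EXPR first and the decl itself second,
   so the pop loop retrieves the decl first and its saved value
   expression second, restoring the originals once
   gimple_regimplify_operands has run with the remapped expressions.  */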
14095 static void
14096 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14098 gimple *stmt = gsi_stmt (*gsi_p);
14099 struct walk_stmt_info wi;
14100 gcall *call_stmt;
14102 if (gimple_has_location (stmt))
14103 input_location = gimple_location (stmt);
14105 if (task_shared_vars)
14106 memset (&wi, '\0', sizeof (wi));
14108 /* If we have issued syntax errors, avoid doing any heavy lifting.
14109 Just replace the OMP directives with a NOP to avoid
14110 confusing RTL expansion. */
14111 if (seen_error () && is_gimple_omp (stmt))
14113 gsi_replace (gsi_p, gimple_build_nop (), true);
14114 return;
14117 switch (gimple_code (stmt))
14119 case GIMPLE_COND:
14121 gcond *cond_stmt = as_a <gcond *> (stmt);
14122 if ((ctx || task_shared_vars)
14123 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
14124 lower_omp_regimplify_p,
14125 ctx ? NULL : &wi, NULL)
14126 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
14127 lower_omp_regimplify_p,
14128 ctx ? NULL : &wi, NULL)))
14129 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
14131 break;
14132 case GIMPLE_CATCH:
14133 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
14134 break;
14135 case GIMPLE_EH_FILTER:
14136 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
14137 break;
14138 case GIMPLE_TRY:
14139 lower_omp (gimple_try_eval_ptr (stmt), ctx);
14140 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
14141 break;
14142 case GIMPLE_TRANSACTION:
14143 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
14144 ctx);
14145 break;
14146 case GIMPLE_BIND:
14147 if (ctx && is_gimple_omp_oacc (ctx->stmt))
14149 tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
14150 oacc_privatization_scan_decl_chain (ctx, vars);
14152 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
14153 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
14154 break;
14155 case GIMPLE_OMP_PARALLEL:
14156 case GIMPLE_OMP_TASK:
14157 ctx = maybe_lookup_ctx (stmt);
14158 gcc_assert (ctx);
14159 if (ctx->cancellable)
14160 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14161 lower_omp_taskreg (gsi_p, ctx);
14162 break;
14163 case GIMPLE_OMP_FOR:
14164 ctx = maybe_lookup_ctx (stmt);
14165 gcc_assert (ctx);
14166 if (ctx->cancellable)
14167 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14168 lower_omp_for (gsi_p, ctx);
14169 break;
14170 case GIMPLE_OMP_SECTIONS:
14171 ctx = maybe_lookup_ctx (stmt);
14172 gcc_assert (ctx);
14173 if (ctx->cancellable)
14174 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14175 lower_omp_sections (gsi_p, ctx);
14176 break;
14177 case GIMPLE_OMP_SCOPE:
14178 ctx = maybe_lookup_ctx (stmt);
14179 gcc_assert (ctx);
14180 lower_omp_scope (gsi_p, ctx);
14181 break;
14182 case GIMPLE_OMP_SINGLE:
14183 ctx = maybe_lookup_ctx (stmt);
14184 gcc_assert (ctx);
14185 lower_omp_single (gsi_p, ctx);
14186 break;
14187 case GIMPLE_OMP_MASTER:
14188 case GIMPLE_OMP_MASKED:
14189 ctx = maybe_lookup_ctx (stmt);
14190 gcc_assert (ctx);
14191 lower_omp_master (gsi_p, ctx);
14192 break;
14193 case GIMPLE_OMP_TASKGROUP:
14194 ctx = maybe_lookup_ctx (stmt);
14195 gcc_assert (ctx);
14196 lower_omp_taskgroup (gsi_p, ctx);
14197 break;
14198 case GIMPLE_OMP_ORDERED:
14199 ctx = maybe_lookup_ctx (stmt);
14200 gcc_assert (ctx);
14201 lower_omp_ordered (gsi_p, ctx);
14202 break;
14203 case GIMPLE_OMP_SCAN:
14204 ctx = maybe_lookup_ctx (stmt);
14205 gcc_assert (ctx);
14206 lower_omp_scan (gsi_p, ctx);
14207 break;
14208 case GIMPLE_OMP_CRITICAL:
14209 ctx = maybe_lookup_ctx (stmt);
14210 gcc_assert (ctx);
14211 lower_omp_critical (gsi_p, ctx);
14212 break;
14213 case GIMPLE_OMP_ATOMIC_LOAD:
14214 if ((ctx || task_shared_vars)
14215 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14216 as_a <gomp_atomic_load *> (stmt)),
14217 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
14218 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14219 break;
14220 case GIMPLE_OMP_TARGET:
14221 ctx = maybe_lookup_ctx (stmt);
14222 gcc_assert (ctx);
14223 lower_omp_target (gsi_p, ctx);
14224 break;
14225 case GIMPLE_OMP_TEAMS:
14226 ctx = maybe_lookup_ctx (stmt);
14227 gcc_assert (ctx);
14228 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
14229 lower_omp_taskreg (gsi_p, ctx);
14230 else
14231 lower_omp_teams (gsi_p, ctx);
14232 break;
14233 case GIMPLE_CALL:
14234 tree fndecl;
14235 call_stmt = as_a <gcall *> (stmt);
14236 fndecl = gimple_call_fndecl (call_stmt);
14237 if (fndecl
14238 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14239 switch (DECL_FUNCTION_CODE (fndecl))
14241 case BUILT_IN_GOMP_BARRIER:
14242 if (ctx == NULL)
14243 break;
14244 /* FALLTHRU */
14245 case BUILT_IN_GOMP_CANCEL:
14246 case BUILT_IN_GOMP_CANCELLATION_POINT:
14247 omp_context *cctx;
14248 cctx = ctx;
14249 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
14250 cctx = cctx->outer;
14251 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
14252 if (!cctx->cancellable)
14254 if (DECL_FUNCTION_CODE (fndecl)
14255 == BUILT_IN_GOMP_CANCELLATION_POINT)
14257 stmt = gimple_build_nop ();
14258 gsi_replace (gsi_p, stmt, false);
14260 break;
14262 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
14264 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
14265 gimple_call_set_fndecl (call_stmt, fndecl);
14266 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
14268 tree lhs;
14269 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
14270 gimple_call_set_lhs (call_stmt, lhs);
14271 tree fallthru_label;
14272 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
14273 gimple *g;
14274 g = gimple_build_label (fallthru_label);
14275 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14276 g = gimple_build_cond (NE_EXPR, lhs,
14277 fold_convert (TREE_TYPE (lhs),
14278 boolean_false_node),
14279 cctx->cancel_label, fallthru_label);
14280 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14281 break;
14282 default:
14283 break;
14285 goto regimplify;
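/* Illustrative sketch of the rewrite performed above for a cancellable
   barrier:

     lhs = GOMP_barrier_cancel ();
     if (lhs != 0) goto <cancel_label>; else goto <fallthru_label>;
   <fallthru_label>:

   so a cancellation observed at the barrier branches directly to the
   enclosing region's cancellation label.  */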
14287 case GIMPLE_ASSIGN:
14288 for (omp_context *up = ctx; up; up = up->outer)
14290 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
14291 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
14292 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
14293 || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
14294 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
14295 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
14296 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
14297 && (gimple_omp_target_kind (up->stmt)
14298 == GF_OMP_TARGET_KIND_DATA)))
14299 continue;
14300 else if (!up->lastprivate_conditional_map)
14301 break;
14302 tree lhs = get_base_address (gimple_assign_lhs (stmt));
14303 if (TREE_CODE (lhs) == MEM_REF
14304 && DECL_P (TREE_OPERAND (lhs, 0))
14305 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
14306 0))) == REFERENCE_TYPE)
14307 lhs = TREE_OPERAND (lhs, 0);
14308 if (DECL_P (lhs))
14309 if (tree *v = up->lastprivate_conditional_map->get (lhs))
14311 tree clauses;
14312 if (up->combined_into_simd_safelen1)
14314 up = up->outer;
14315 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
14316 up = up->outer;
14318 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
14319 clauses = gimple_omp_for_clauses (up->stmt);
14320 else
14321 clauses = gimple_omp_sections_clauses (up->stmt);
14322 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
14323 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
14324 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14325 OMP_CLAUSE__CONDTEMP_);
14326 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
14327 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
14328 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14331 /* FALLTHRU */
14333 default:
14334 regimplify:
14335 if ((ctx || task_shared_vars)
14336 && walk_gimple_op (stmt, lower_omp_regimplify_p,
14337 ctx ? NULL : &wi))
14339 /* Just remove clobbers; this should happen only if we have
14340 "privatized" local addressable variables in SIMD regions.
14341 The clobber isn't needed in that case, and gimplifying the address
14342 of the ARRAY_REF into a pointer and creating a MEM_REF based
14343 clobber would create worse code than we get with the clobber
14344 dropped. */
14345 if (gimple_clobber_p (stmt))
14347 gsi_replace (gsi_p, gimple_build_nop (), true);
14348 break;
14350 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14352 break;

static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}

/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If the current function is a method, remove the artificial dummy
     VAR_DECLs created for non-static data member privatization; they aren't
     needed for debug info or anything else, have already been replaced
     everywhere in the IL, and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}
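
/* For orientation, a sketch of the overall effect on user code (the
   names are invented, and the actual outlining is performed later,
   not in this pass):

     #pragma omp parallel shared(x)
     x++;

   eventually becomes an outlined body invoked through the libgomp
   runtime, conceptually

     GOMP_parallel (foo._omp_fn.0, &.omp_data_o.1, 0, 0);

   with the data-sharing clauses marshalled through the generated
   .omp_data record.  */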

namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}

/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */
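
/* As a concrete example (user-code sketch), this utility rejects

     goto l;
     #pragma omp parallel
     {
       l: ;
     }

   because the branch enters the body of the structured block from
   outside it.  */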

static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char *kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by tree_cons'ing onto a list, but there is no easy
     counterpart in gimple tuples.  It seems like far too much work for
     issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
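
/* Conversely (again a user-code sketch), when the branch itself is inside
   a construct:

     #pragma omp parallel
     {
       goto l;
     }
     l: ;

   branch_ctx is non-NULL above, so the vaguer "invalid branch to/from"
   wording is used.  */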

/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
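
/* After this walk, all_labels maps each LABEL_DECL to its innermost
   enclosing OMP construct, or to NULL for labels outside any construct;
   pass 2 compares that recorded context against the context of every
   branch it encounters.  */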

/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
	{
	  gcond *cond_stmt = as_a <gcond *> (stmt);
	  tree lab = gimple_cond_true_label (cond_stmt);
	  if (lab)
	    {
	      n = splay_tree_lookup (all_labels,
				     (splay_tree_key) lab);
	      diagnose_sb_0 (gsi_p, context,
			     n ? (gimple *) n->value : NULL);
	    }
	  lab = gimple_cond_false_label (cond_stmt);
	  if (lab)
	    {
	      n = splay_tree_lookup (all_labels,
				     (splay_tree_key) lab);
	      diagnose_sb_0 (gsi_p, context,
			     n ? (gimple *) n->value : NULL);
	    }
	}
      break;

    case GIMPLE_GOTO:
	{
	  tree lab = gimple_goto_dest (stmt);
	  if (TREE_CODE (lab) != LABEL_DECL)
	    break;

	  n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	  diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
	}
      break;

    case GIMPLE_SWITCH:
	{
	  gswitch *switch_stmt = as_a <gswitch *> (stmt);
	  unsigned int i;
	  for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	    {
	      tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	      n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	      if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
		break;
	    }
	}
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
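
/* Note that GIMPLE_RETURN is treated as a branch to a NULL label context
   above, so a return statement nested in a construct (user-code sketch):

     #pragma omp parallel
     {
       return;
     }

   is diagnosed as well.  */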

static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}
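
/* The two walks cannot be fused: a branch may precede its target label
   (e.g. a forward goto into a later construct), so diagnose_sb_2 needs
   the complete all_labels map that diagnose_sb_1 builds first.  */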

namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}

#include "gt-omp-low.h"